Dec 01 06:43:09 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 01 06:43:09 crc restorecon[4565]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 01 06:43:09 crc restorecon[4565]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:09 crc restorecon[4565]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 06:43:09 crc 
restorecon[4565]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 01 06:43:09 crc restorecon[4565]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 06:43:09 crc restorecon[4565]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 01 06:43:09 crc 
restorecon[4565]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:09 crc restorecon[4565]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:43:09 crc restorecon[4565]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 
06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 01 06:43:10 crc 
restorecon[4565]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 
06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: 
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 
06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 01 06:43:10 crc restorecon[4565]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 06:43:10 crc restorecon[4565]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 01 06:43:10 crc restorecon[4565]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Dec 01 06:43:10 crc kubenswrapper[4632]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 01 06:43:10 crc kubenswrapper[4632]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Dec 01 06:43:10 crc kubenswrapper[4632]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 01 06:43:10 crc kubenswrapper[4632]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 01 06:43:10 crc kubenswrapper[4632]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Dec 01 06:43:10 crc kubenswrapper[4632]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.619654 4632 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.621959 4632 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.621975 4632 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.621979 4632 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.621983 4632 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.621986 4632 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.621990 4632 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.621994 4632 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.621998 4632 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622002 4632 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622005 4632 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622009 4632 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622014 4632 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622018 4632 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622021 4632 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622024 4632 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622029 4632 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622033 4632 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622040 4632 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622043 4632 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622047 4632 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622050 4632 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622053 4632 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622057 4632 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622061 4632 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622065 4632 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622068 4632 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622071 4632 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622074 4632 feature_gate.go:330] unrecognized feature gate: Example Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622078 4632 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622081 4632 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622084 4632 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622087 4632 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622090 4632 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622094 4632 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622097 4632 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622100 4632 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622103 4632 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622107 4632 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
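The long runs of "unrecognized feature gate" warnings above are OpenShift-level gates that the kubelet's embedded Kubernetes gate registry does not know, and the same list is re-emitted on every parsing pass, which is why it recurs several times below. A small sketch, assuming a saved copy of this log at a hypothetical path, that deduplicates and counts them:

# Sketch: summarize the repeated "unrecognized feature gate" warnings.
# The log path is hypothetical.
import re
from collections import Counter

pattern = re.compile(r"unrecognized feature gate: (\S+)")
counts = Counter()
with open("kubelet.log", encoding="utf-8") as fh:
    for line in fh:
        counts.update(pattern.findall(line))

# Each gate appears once per parsing pass, so the count per gate shows how
# many times the kubelet re-parsed its feature-gate configuration.
for gate, n in sorted(counts.items()):
    print(f"{gate}: seen {n}x")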
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622111 4632 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622115 4632 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622120 4632 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622124 4632 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622127 4632 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622130 4632 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622141 4632 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622145 4632 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622149 4632 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622152 4632 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622156 4632 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622160 4632 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622163 4632 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622168 4632 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622172 4632 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622175 4632 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622179 4632 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622182 4632 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622185 4632 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622188 4632 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622192 4632 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622195 4632 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622198 4632 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622201 4632 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622204 4632 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 01 06:43:10 crc 
kubenswrapper[4632]: W1201 06:43:10.622211 4632 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622214 4632 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622217 4632 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622220 4632 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622224 4632 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622227 4632 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622230 4632 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.622233 4632 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622594 4632 flags.go:64] FLAG: --address="0.0.0.0" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622607 4632 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622617 4632 flags.go:64] FLAG: --anonymous-auth="true" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622622 4632 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622627 4632 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622637 4632 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622642 4632 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622647 4632 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622651 4632 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622654 4632 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622658 4632 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622662 4632 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622666 4632 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622669 4632 flags.go:64] FLAG: --cgroup-root="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622672 4632 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622676 4632 flags.go:64] FLAG: --client-ca-file="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622680 4632 flags.go:64] FLAG: --cloud-config="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622684 4632 flags.go:64] FLAG: --cloud-provider="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622687 4632 flags.go:64] FLAG: --cluster-dns="[]" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622696 4632 flags.go:64] FLAG: --cluster-domain="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622699 4632 flags.go:64] FLAG: 
--config="/etc/kubernetes/kubelet.conf" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622703 4632 flags.go:64] FLAG: --config-dir="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622709 4632 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622713 4632 flags.go:64] FLAG: --container-log-max-files="5" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622717 4632 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622721 4632 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622724 4632 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622728 4632 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622732 4632 flags.go:64] FLAG: --contention-profiling="false" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622736 4632 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622740 4632 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622743 4632 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622747 4632 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622752 4632 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622755 4632 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622759 4632 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622763 4632 flags.go:64] FLAG: --enable-load-reader="false" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622766 4632 flags.go:64] FLAG: --enable-server="true" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622770 4632 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622777 4632 flags.go:64] FLAG: --event-burst="100" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622781 4632 flags.go:64] FLAG: --event-qps="50" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622789 4632 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622793 4632 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622797 4632 flags.go:64] FLAG: --eviction-hard="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622801 4632 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622804 4632 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622808 4632 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622812 4632 flags.go:64] FLAG: --eviction-soft="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622816 4632 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622820 4632 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 
06:43:10.622830 4632 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622834 4632 flags.go:64] FLAG: --experimental-mounter-path="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622838 4632 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622841 4632 flags.go:64] FLAG: --fail-swap-on="true" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622845 4632 flags.go:64] FLAG: --feature-gates="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622850 4632 flags.go:64] FLAG: --file-check-frequency="20s" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622853 4632 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622857 4632 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622860 4632 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622864 4632 flags.go:64] FLAG: --healthz-port="10248" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622868 4632 flags.go:64] FLAG: --help="false" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622871 4632 flags.go:64] FLAG: --hostname-override="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622875 4632 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622878 4632 flags.go:64] FLAG: --http-check-frequency="20s" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622882 4632 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622885 4632 flags.go:64] FLAG: --image-credential-provider-config="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622889 4632 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622892 4632 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622896 4632 flags.go:64] FLAG: --image-service-endpoint="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622900 4632 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622904 4632 flags.go:64] FLAG: --kube-api-burst="100" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622907 4632 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622920 4632 flags.go:64] FLAG: --kube-api-qps="50" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622924 4632 flags.go:64] FLAG: --kube-reserved="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622928 4632 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622931 4632 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622935 4632 flags.go:64] FLAG: --kubelet-cgroups="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622943 4632 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622947 4632 flags.go:64] FLAG: --lock-file="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622950 4632 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622954 4632 flags.go:64] 
FLAG: --log-flush-frequency="5s" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622958 4632 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622964 4632 flags.go:64] FLAG: --log-json-split-stream="false" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622967 4632 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622971 4632 flags.go:64] FLAG: --log-text-split-stream="false" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622976 4632 flags.go:64] FLAG: --logging-format="text" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622979 4632 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622983 4632 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622986 4632 flags.go:64] FLAG: --manifest-url="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622990 4632 flags.go:64] FLAG: --manifest-url-header="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.622996 4632 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623000 4632 flags.go:64] FLAG: --max-open-files="1000000" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623004 4632 flags.go:64] FLAG: --max-pods="110" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623007 4632 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623011 4632 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623015 4632 flags.go:64] FLAG: --memory-manager-policy="None" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623019 4632 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623022 4632 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623026 4632 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623030 4632 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623038 4632 flags.go:64] FLAG: --node-status-max-images="50" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623042 4632 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623045 4632 flags.go:64] FLAG: --oom-score-adj="-999" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623049 4632 flags.go:64] FLAG: --pod-cidr="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623052 4632 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623062 4632 flags.go:64] FLAG: --pod-manifest-path="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623066 4632 flags.go:64] FLAG: --pod-max-pids="-1" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623069 4632 flags.go:64] FLAG: --pods-per-core="0" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623073 4632 flags.go:64] FLAG: --port="10250" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 
06:43:10.623076 4632 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623080 4632 flags.go:64] FLAG: --provider-id="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623084 4632 flags.go:64] FLAG: --qos-reserved="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623087 4632 flags.go:64] FLAG: --read-only-port="10255" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623096 4632 flags.go:64] FLAG: --register-node="true" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623100 4632 flags.go:64] FLAG: --register-schedulable="true" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623103 4632 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623113 4632 flags.go:64] FLAG: --registry-burst="10" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623116 4632 flags.go:64] FLAG: --registry-qps="5" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623120 4632 flags.go:64] FLAG: --reserved-cpus="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623123 4632 flags.go:64] FLAG: --reserved-memory="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623128 4632 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623131 4632 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623135 4632 flags.go:64] FLAG: --rotate-certificates="false" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623138 4632 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623142 4632 flags.go:64] FLAG: --runonce="false" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623145 4632 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623149 4632 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623153 4632 flags.go:64] FLAG: --seccomp-default="false" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623156 4632 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623160 4632 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623163 4632 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623167 4632 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623171 4632 flags.go:64] FLAG: --storage-driver-password="root" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623175 4632 flags.go:64] FLAG: --storage-driver-secure="false" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623178 4632 flags.go:64] FLAG: --storage-driver-table="stats" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623181 4632 flags.go:64] FLAG: --storage-driver-user="root" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623185 4632 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623189 4632 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623192 4632 flags.go:64] FLAG: --system-cgroups="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623196 4632 flags.go:64] FLAG: 
--system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623201 4632 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623205 4632 flags.go:64] FLAG: --tls-cert-file="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623208 4632 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623215 4632 flags.go:64] FLAG: --tls-min-version="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623219 4632 flags.go:64] FLAG: --tls-private-key-file="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623223 4632 flags.go:64] FLAG: --topology-manager-policy="none" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623226 4632 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623230 4632 flags.go:64] FLAG: --topology-manager-scope="container" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623234 4632 flags.go:64] FLAG: --v="2" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623243 4632 flags.go:64] FLAG: --version="false" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623248 4632 flags.go:64] FLAG: --vmodule="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623252 4632 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.623256 4632 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625675 4632 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625702 4632 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625709 4632 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625714 4632 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625718 4632 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625722 4632 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625726 4632 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625729 4632 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625733 4632 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625737 4632 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625740 4632 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625745 4632 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625748 4632 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625754 4632 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. 
It will be removed in a future release. Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625760 4632 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625765 4632 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625770 4632 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625775 4632 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625779 4632 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625782 4632 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625786 4632 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625790 4632 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625794 4632 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625797 4632 feature_gate.go:330] unrecognized feature gate: Example Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625805 4632 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625809 4632 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625813 4632 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625816 4632 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625820 4632 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625823 4632 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625826 4632 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625830 4632 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625834 4632 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625840 4632 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625843 4632 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625847 4632 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625850 4632 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625854 4632 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625857 4632 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625860 4632 feature_gate.go:330] 
unrecognized feature gate: InsightsRuntimeExtractor Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625863 4632 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625867 4632 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625871 4632 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625874 4632 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625877 4632 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625881 4632 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625884 4632 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625888 4632 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625891 4632 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625896 4632 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625900 4632 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625904 4632 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625916 4632 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625921 4632 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625924 4632 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625929 4632 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
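The flags.go:64 lines above dump every CLI flag with its effective value in the form FLAG: --name="value". A sketch that parses such lines into a dictionary; the three sample lines are copied from the dump above:

# Sketch: parse the flags.go 'FLAG: --name="value"' dump into a dict.
import re

sample = '''
I1201 06:43:10.622699 4632 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
I1201 06:43:10.622931 4632 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
I1201 06:43:10.623026 4632 flags.go:64] FLAG: --node-ip="192.168.126.11"
'''

flag_re = re.compile(r'FLAG: --([\w.-]+)="(.*)"$')
flags = {m.group(1): m.group(2)
         for line in sample.splitlines()
         if (m := flag_re.search(line.rstrip()))}
print(flags)
# {'config': '/etc/kubernetes/kubelet.conf',
#  'kubeconfig': '/var/lib/kubelet/kubeconfig',
#  'node-ip': '192.168.126.11'}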
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625932 4632 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625937 4632 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625940 4632 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625945 4632 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625948 4632 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625952 4632 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625956 4632 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625959 4632 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625963 4632 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625968 4632 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625971 4632 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625975 4632 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625979 4632 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625982 4632 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.625985 4632 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.625992 4632 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.633229 4632 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.633256 4632 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633321 4632 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633331 4632 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633334 4632 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633338 4632 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633342 4632 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 
01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633346 4632 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633361 4632 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633365 4632 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633369 4632 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633373 4632 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633377 4632 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633381 4632 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633384 4632 feature_gate.go:330] unrecognized feature gate: Example Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633387 4632 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633390 4632 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633394 4632 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633398 4632 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633405 4632 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633408 4632 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633412 4632 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633416 4632 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633419 4632 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633422 4632 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633427 4632 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633430 4632 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633433 4632 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633436 4632 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633440 4632 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633443 4632 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633446 4632 feature_gate.go:330] unrecognized feature gate: 
VolumeGroupSnapshot Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633450 4632 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633453 4632 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633456 4632 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633459 4632 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633462 4632 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633465 4632 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633468 4632 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633471 4632 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633474 4632 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633477 4632 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633480 4632 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633484 4632 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633487 4632 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633507 4632 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633511 4632 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633515 4632 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633518 4632 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633522 4632 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633525 4632 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633529 4632 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633532 4632 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633536 4632 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633539 4632 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633542 4632 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633545 4632 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633549 4632 feature_gate.go:330] unrecognized feature gate: 
CSIDriverSharedResource Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633553 4632 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633556 4632 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633561 4632 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633571 4632 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633575 4632 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633578 4632 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633582 4632 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633585 4632 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633589 4632 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633593 4632 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633597 4632 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633600 4632 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633603 4632 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633606 4632 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633610 4632 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.633616 4632 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633758 4632 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633764 4632 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633768 4632 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633772 4632 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
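The feature_gate.go:386 lines print the resolved gate set as a Go map literal. A sketch that turns an excerpt of that line into a Python dict of booleans:

# Sketch: parse the Go-style "feature gates: {map[...]}" dump into a dict.
# The excerpt below is shortened from the line logged above.
import re

line = ("feature gates: {map[CloudDualStackNodeIPs:true "
        "DisableKubeletCloudCredentialProviders:true "
        "DynamicResourceAllocation:false KMSv1:true "
        "ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}")

gates = {name: value == "true"
         for name, value in re.findall(r"(\w+):(true|false)", line)}
print(sorted(g for g, enabled in gates.items() if enabled))
# ['CloudDualStackNodeIPs', 'DisableKubeletCloudCredentialProviders',
#  'KMSv1', 'ValidatingAdmissionPolicy']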
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633776 4632 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633780 4632 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633784 4632 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633788 4632 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633791 4632 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633795 4632 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633798 4632 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633801 4632 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633804 4632 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633809 4632 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633812 4632 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633816 4632 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633820 4632 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633825 4632 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633828 4632 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633832 4632 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633835 4632 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633840 4632 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633843 4632 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633846 4632 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633850 4632 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633853 4632 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633857 4632 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633860 4632 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633863 4632 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633866 4632 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633869 4632 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633873 4632 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633876 4632 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633879 4632 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633882 4632 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633886 4632 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633889 4632 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633892 4632 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633895 4632 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633898 4632 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633902 4632 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633905 4632 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633918 4632 feature_gate.go:353] Setting GA feature gate 
DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633922 4632 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633926 4632 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633929 4632 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633933 4632 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633936 4632 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633939 4632 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633942 4632 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633946 4632 feature_gate.go:330] unrecognized feature gate: Example
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633949 4632 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633952 4632 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633955 4632 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633958 4632 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633962 4632 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633965 4632 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633968 4632 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633972 4632 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633976 4632 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633979 4632 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633982 4632 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633986 4632 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633989 4632 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633992 4632 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633996 4632 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.633999 4632 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.634002 4632 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.634005 4632 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.634009 4632 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.634012 4632 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.634016 4632 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.634157 4632 server.go:940] "Client rotation is on, will bootstrap in background"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.637171 4632 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.637242 4632 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.638084 4632 server.go:997] "Starting client certificate rotation"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.638109 4632 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.638247 4632 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-21 13:05:56.595272584 +0000 UTC
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.638291 4632 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 486h22m45.956983967s for next certificate rotation
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.647600 4632 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.649483 4632 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.660205 4632 log.go:25] "Validated CRI v1 runtime API"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.677901 4632 log.go:25] "Validated CRI v1 image API"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.679113 4632 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.682056 4632 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-01-06-40-02-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.682092 4632 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:49 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm:{mountpoint:/var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm major:0 minor:42 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:50 fsType:tmpfs blockSize:0} overlay_0-43:{mountpoint:/var/lib/containers/storage/overlay/94b752e0a51c0134b00ddef6dc7a933a9d7c1d9bdc88a18dae4192a0d557d623/merged major:0 minor:43 fsType:overlay blockSize:0}]
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.698433 4632 manager.go:217] Machine: {Timestamp:2025-12-01 06:43:10.696890064 +0000 UTC m=+0.261903056 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2445406 MemoryCapacity:33654120448 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24 BootID:8ffed684-78e5-4500-8de8-2b1680e680bb Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm DeviceMajor:0 DeviceMinor:42 Capacity:65536000 Type:vfs Inodes:4108169 HasInodes:true} {Device:overlay_0-43 DeviceMajor:0 DeviceMinor:43 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:49 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827060224 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:50 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:15:57:c7 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:enp3s0 MacAddress:fa:16:3e:15:57:c7 Speed:-1 Mtu:1500} {Name:enp7s0 MacAddress:fa:16:3e:34:35:4c Speed:-1 Mtu:1440} {Name:enp7s0.20 MacAddress:52:54:00:33:92:bb Speed:-1 Mtu:1436} {Name:enp7s0.21 MacAddress:52:54:00:16:97:47 Speed:-1 Mtu:1436} {Name:enp7s0.22 MacAddress:52:54:00:da:83:e1 Speed:-1 Mtu:1436} {Name:eth10 MacAddress:5e:bd:d2:d6:3c:ad Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:de:47:2b:18:77:81 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654120448 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:65536 Type:Data Level:1} {Id:0 Size:65536 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:65536 Type:Data Level:1} {Id:1 Size:65536 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:65536 Type:Data Level:1} {Id:10 Size:65536 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:65536 Type:Data Level:1} {Id:11 Size:65536 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:65536 Type:Data Level:1} {Id:2 Size:65536 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:65536 Type:Data Level:1} {Id:3 Size:65536 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:65536 Type:Data Level:1} {Id:4 Size:65536 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:65536 Type:Data Level:1} {Id:5 Size:65536 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:65536 Type:Data Level:1} {Id:6 Size:65536 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:65536 Type:Data Level:1} {Id:7 Size:65536 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:65536 Type:Data Level:1} {Id:8 Size:65536 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:65536 Type:Data Level:1} {Id:9 Size:65536 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.698596 4632 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
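[Editor's note] The feature_gate.go:330 warnings above show the kubelet rejecting gate names it does not recognize (OpenShift-specific gates passed down in its config), while the feature_gate.go:386 line records the effective upstream gate map it keeps. The Go sketch below is a minimal illustration of that parse-and-warn shape under stated assumptions; the knownGates registry and parseGates helper are hypothetical, not the k8s.io/component-base featuregate implementation.

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// knownGates stands in for the kubelet's compiled-in feature registry
// (illustrative subset only).
var knownGates = map[string]bool{
	"CloudDualStackNodeIPs":                  true,
	"DisableKubeletCloudCredentialProviders": true,
	"NodeSwap":                               true,
}

// parseGates applies "Name=bool" pairs: unknown names are warned about and
// skipped, mirroring the W (warning) / I (effective map) pair of log lines above.
func parseGates(spec string) map[string]bool {
	gates := map[string]bool{}
	for _, kv := range strings.Split(spec, ",") {
		parts := strings.SplitN(kv, "=", 2)
		if len(parts) != 2 {
			continue // malformed entry; real code would surface an error
		}
		name := strings.TrimSpace(parts[0])
		val, err := strconv.ParseBool(strings.TrimSpace(parts[1]))
		if err != nil {
			continue
		}
		if !knownGates[name] {
			fmt.Printf("W unrecognized feature gate: %s\n", name)
			continue
		}
		gates[name] = val
	}
	return gates
}

func main() {
	// NetworkSegmentation is warned about and dropped; NodeSwap is kept.
	fmt.Println(parseGates("NodeSwap=false,NetworkSegmentation=true"))
}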
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.698715 4632 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.698998 4632 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.699145 4632 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.699170 4632 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.699328 4632 topology_manager.go:138] "Creating topology manager with none policy"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.699337 4632 container_manager_linux.go:303] "Creating device plugin manager"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.699834 4632 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.699862 4632 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.699994 4632 state_mem.go:36] "Initialized new in-memory state store"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.700424 4632 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.701956 4632 kubelet.go:418] "Attempting to sync node with API server"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.701978 4632 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
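[Editor's note] The nodeConfig dump above fixes this node's hard eviction thresholds: memory.available < 100Mi, nodefs.available < 10%, nodefs.inodesFree < 5%, imagefs.available < 15%, imagefs.inodesFree < 5%. Each threshold carries either an absolute quantity or a percentage of capacity. A minimal sketch of evaluating one such threshold, assuming a simplified Threshold type and breached helper (both hypothetical, not the kubelet's eviction-manager API):

package main

import "fmt"

// Threshold mirrors the shape of a HardEvictionThresholds entry above:
// a signal plus either an absolute quantity (bytes) or a percentage.
type Threshold struct {
	Signal   string
	Quantity int64   // bytes; zero when the threshold is percentage-based
	Percent  float64 // fraction of capacity; zero when quantity-based
}

// breached reports whether available has fallen below the threshold,
// resolving a percentage against total capacity first.
func breached(t Threshold, available, capacity int64) bool {
	limit := t.Quantity
	if t.Percent > 0 {
		limit = int64(t.Percent * float64(capacity))
	}
	return available < limit
}

func main() {
	memory := Threshold{Signal: "memory.available", Quantity: 100 << 20} // 100Mi
	nodefs := Threshold{Signal: "nodefs.available", Percent: 0.1}        // 10%
	fmt.Println(breached(memory, 64<<20, 32<<30)) // true: 64Mi < 100Mi
	fmt.Println(breached(nodefs, 20<<30, 85<<30)) // false: 20Gi > 8.5Gi
}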
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.702004 4632 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.702015 4632 kubelet.go:324] "Adding apiserver pod source"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.702024 4632 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.704129 4632 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.704578 4632 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.705971 4632 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.706293 4632 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 192.168.26.49:6443: connect: connection refused
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.706289 4632 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 192.168.26.49:6443: connect: connection refused
Dec 01 06:43:10 crc kubenswrapper[4632]: E1201 06:43:10.706408 4632 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 192.168.26.49:6443: connect: connection refused" logger="UnhandledError"
Dec 01 06:43:10 crc kubenswrapper[4632]: E1201 06:43:10.706372 4632 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 192.168.26.49:6443: connect: connection refused" logger="UnhandledError"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.707001 4632 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.707021 4632 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.707030 4632 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.707036 4632 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.707046 4632 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.707052 4632 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.707058 4632 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.707067 4632 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.707074 4632 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.707080 4632 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.707104 4632 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.707123 4632 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.707511 4632 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.707861 4632 server.go:1280] "Started kubelet"
Dec 01 06:43:10 crc systemd[1]: Started Kubernetes Kubelet.
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.709031 4632 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 192.168.26.49:6443: connect: connection refused
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.709051 4632 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.709366 4632 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.711696 4632 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.712627 4632 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.712650 4632 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.713179 4632 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.713207 4632 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.713278 4632 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 01 06:43:10 crc kubenswrapper[4632]: E1201 06:43:10.713274 4632 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.713496 4632 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 18:38:26.153220404 +0000 UTC
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.713533 4632 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 683h55m15.439689469s for next certificate rotation
Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.713643 4632 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 192.168.26.49:6443: connect: connection refused
Dec 01 06:43:10 crc kubenswrapper[4632]: E1201 06:43:10.713696 4632 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 192.168.26.49:6443: connect: connection refused" logger="UnhandledError"
Dec 01 06:43:10 crc kubenswrapper[4632]: E1201 06:43:10.713708 4632 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.49:6443: connect: connection refused" interval="200ms"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.714484 4632 factory.go:55] Registering systemd factory
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.714503 4632 factory.go:221] Registration of the systemd container factory successfully
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.716717 4632 factory.go:153] Registering CRI-O factory
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.716737 4632 factory.go:221] Registration of the crio container factory successfully
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.716791 4632 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.716812 4632 factory.go:103] Registering Raw factory
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.716825 4632 manager.go:1196] Started watching for new ooms in manager
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.717160 4632 server.go:460] "Adding debug handlers to kubelet server"
Dec 01 06:43:10 crc kubenswrapper[4632]: E1201 06:43:10.717382 4632 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 192.168.26.49:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187d0454abf5375c default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-01 06:43:10.70784086 +0000 UTC m=+0.272853832,LastTimestamp:2025-12-01 06:43:10.70784086 +0000 UTC m=+0.272853832,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.718127 4632 manager.go:319] Starting recovery of all containers
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.725826 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.725867 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.725880 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.725891 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.725901 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.725922 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.725933 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.725943 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.725954 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.725963 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.725971 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.725982 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.725992 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726003 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726030 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726046 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726056 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726066 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726077 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726087 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726097 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726107 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726117 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726127 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726136 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726147 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726159 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726169 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726180 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726191 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726218 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726229 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726239 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726249 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726258 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726268 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726277 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726287 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726297 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726308 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726317 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726328 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726339 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726361 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726374 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726384 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726395 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726405 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726414 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726425 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726434 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726446 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726501 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726513 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726526 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726538 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726548 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726557 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726568 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726576 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726586 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726596 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726606 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726617 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726626 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726635 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726646 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726655 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726663 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726673 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726684 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726694 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726703 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726711 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726720 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.726730 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.728742 4632 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.728785 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.728803 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.728816 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.728828 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.728841 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.728851 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.728862 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.728874 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.728894 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.728907 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.728928 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.728939 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.728951 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.728960 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.728973 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.728984 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.728994 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729004 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729015 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729025 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729035 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729046 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729076 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729086 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729097 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729107 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729117 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729128 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729148 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729161 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729174 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729186 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729198 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729210 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729223 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729233 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729244 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729256 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729267 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729277 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729288 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729299 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729310 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729321 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729332 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729365 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729378 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729389 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729400 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729411 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729422 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729457 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729467 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729478 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729489 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729499 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729509 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729519 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729530 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729539 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729548 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729557 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.729587 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733258 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733326 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733345 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733381 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733394 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733411 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3"
volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733423 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733435 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733450 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733461 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733488 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733516 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733528 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733544 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733554 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733564 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733579 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" 
volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733591 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733604 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733613 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733623 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733634 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733645 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733673 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733686 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733695 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733708 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733718 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" 
volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733732 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733778 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.733852 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.734248 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.734272 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.734284 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.734294 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.734304 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.734313 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.734323 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.734332 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.734342 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735033 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735081 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735097 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735110 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735121 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735134 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735148 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735160 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735171 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735183 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" 
volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735194 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735207 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735467 4632 manager.go:324] Recovery completed Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735747 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735779 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735792 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735803 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735815 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735825 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735847 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735857 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735867 4632 reconstruct.go:130] "Volume is marked 
as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735879 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735889 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735904 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735923 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735932 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735941 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735950 4632 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735958 4632 reconstruct.go:97] "Volume reconstruction finished" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.735966 4632 reconciler.go:26] "Reconciler: start to sync state" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.742873 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.743958 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.743988 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.744009 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.744643 4632 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 
06:43:10.744657 4632 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.744672 4632 state_mem.go:36] "Initialized new in-memory state store" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.747435 4632 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.748309 4632 policy_none.go:49] "None policy: Start" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.748817 4632 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.748838 4632 state_mem.go:35] "Initializing new in-memory state store" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.748973 4632 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.748999 4632 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.749020 4632 kubelet.go:2335] "Starting kubelet main sync loop" Dec 01 06:43:10 crc kubenswrapper[4632]: E1201 06:43:10.749049 4632 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 01 06:43:10 crc kubenswrapper[4632]: W1201 06:43:10.749647 4632 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 192.168.26.49:6443: connect: connection refused Dec 01 06:43:10 crc kubenswrapper[4632]: E1201 06:43:10.749693 4632 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 192.168.26.49:6443: connect: connection refused" logger="UnhandledError" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.797210 4632 manager.go:334] "Starting Device Plugin manager" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.797246 4632 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.797258 4632 server.go:79] "Starting device plugin registration server" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.798407 4632 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.798423 4632 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.798551 4632 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.798638 4632 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.798649 4632 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 01 06:43:10 crc kubenswrapper[4632]: E1201 06:43:10.804416 4632 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.850120 4632 kubelet.go:2421] "SyncLoop ADD" source="file" 
pods=["openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.850227 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.851093 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.851144 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.851156 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.851374 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.851486 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.851536 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.852254 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.852302 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.852312 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.852264 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.852433 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.852445 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.852456 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.852596 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.852642 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.853086 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.853105 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.853113 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.853211 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.853291 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.853343 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.853364 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.853522 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.853533 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.853929 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.853943 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.853968 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.853978 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.853948 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.854053 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.854171 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.854262 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.854288 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.854662 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.854690 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.854698 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.854786 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.854807 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.855117 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.855150 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.855205 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.855251 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.855270 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.855280 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.898573 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.899291 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.899316 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.899325 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.899344 4632 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 01 06:43:10 crc kubenswrapper[4632]: E1201 06:43:10.899654 4632 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.26.49:6443: connect: connection refused" node="crc" Dec 01 06:43:10 crc kubenswrapper[4632]: E1201 06:43:10.914612 4632 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.49:6443: connect: 
connection refused" interval="400ms" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.938085 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.938113 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.938147 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.938164 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.938181 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.938278 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.938318 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.938338 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.938367 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.938384 4632 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.938410 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.938440 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.938674 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.938745 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 06:43:10 crc kubenswrapper[4632]: I1201 06:43:10.938788 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.039822 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.039895 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.039929 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.039953 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod 
\"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.039980 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.039999 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.040001 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.040008 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.040023 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.040076 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.040020 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.040089 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.040105 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.040079 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.040187 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.040203 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.040214 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.040273 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.040292 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.040300 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.040311 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.040347 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.040375 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 06:43:11 crc 
Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.040400 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.040420 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.040442 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.040482 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.040423 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.040519 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.099821 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.101150 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.101186 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.101198 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.101220 4632 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 01 06:43:11 crc kubenswrapper[4632]: E1201 06:43:11.101583 4632 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.26.49:6443: connect: connection refused" node="crc"
Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.179892 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.198339 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:43:11 crc kubenswrapper[4632]: W1201 06:43:11.199110 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-e7c025e6a647829ca121b59959be3dbdd420010376421c59ae4fa06244a7cca9 WatchSource:0}: Error finding container e7c025e6a647829ca121b59959be3dbdd420010376421c59ae4fa06244a7cca9: Status 404 returned error can't find the container with id e7c025e6a647829ca121b59959be3dbdd420010376421c59ae4fa06244a7cca9
Dec 01 06:43:11 crc kubenswrapper[4632]: W1201 06:43:11.214115 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-236027de65d059b0979ec295a98f498a4839c7a93c147559ee92dc902f2eff76 WatchSource:0}: Error finding container 236027de65d059b0979ec295a98f498a4839c7a93c147559ee92dc902f2eff76: Status 404 returned error can't find the container with id 236027de65d059b0979ec295a98f498a4839c7a93c147559ee92dc902f2eff76
Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.219095 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 01 06:43:11 crc kubenswrapper[4632]: W1201 06:43:11.226323 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-1c178afc2ecef2efd9fbce07f2c18a3506890df23164860b6b7f4c17a963b223 WatchSource:0}: Error finding container 1c178afc2ecef2efd9fbce07f2c18a3506890df23164860b6b7f4c17a963b223: Status 404 returned error can't find the container with id 1c178afc2ecef2efd9fbce07f2c18a3506890df23164860b6b7f4c17a963b223
Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.240595 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.244793 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 01 06:43:11 crc kubenswrapper[4632]: W1201 06:43:11.249777 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-148fbfc04a58648eecd4c8b7028d8958bc30aeb9c5ae37d364545c5fe467baad WatchSource:0}: Error finding container 148fbfc04a58648eecd4c8b7028d8958bc30aeb9c5ae37d364545c5fe467baad: Status 404 returned error can't find the container with id 148fbfc04a58648eecd4c8b7028d8958bc30aeb9c5ae37d364545c5fe467baad
Dec 01 06:43:11 crc kubenswrapper[4632]: W1201 06:43:11.253994 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-12684ca85d3bdc02d6a318646b1d9440696efa291693f5197c219a9873b0356f WatchSource:0}: Error finding container 12684ca85d3bdc02d6a318646b1d9440696efa291693f5197c219a9873b0356f: Status 404 returned error can't find the container with id 12684ca85d3bdc02d6a318646b1d9440696efa291693f5197c219a9873b0356f
Dec 01 06:43:11 crc kubenswrapper[4632]: E1201 06:43:11.315430 4632 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.49:6443: connect: connection refused" interval="800ms"
Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.502696 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.503443 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.503473 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.503482 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.503502 4632 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 01 06:43:11 crc kubenswrapper[4632]: E1201 06:43:11.504023 4632 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.26.49:6443: connect: connection refused" node="crc"
Dec 01 06:43:11 crc kubenswrapper[4632]: W1201 06:43:11.535047 4632 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 192.168.26.49:6443: connect: connection refused
Dec 01 06:43:11 crc kubenswrapper[4632]: E1201 06:43:11.535198 4632 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 192.168.26.49:6443: connect: connection refused" logger="UnhandledError"
Dec 01 06:43:11 crc kubenswrapper[4632]: W1201 06:43:11.677719 4632 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get 
"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 192.168.26.49:6443: connect: connection refused Dec 01 06:43:11 crc kubenswrapper[4632]: E1201 06:43:11.678120 4632 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 192.168.26.49:6443: connect: connection refused" logger="UnhandledError" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.710267 4632 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 192.168.26.49:6443: connect: connection refused Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.753289 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56"} Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.753382 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"1c178afc2ecef2efd9fbce07f2c18a3506890df23164860b6b7f4c17a963b223"} Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.754524 4632 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b" exitCode=0 Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.754573 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b"} Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.754589 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"236027de65d059b0979ec295a98f498a4839c7a93c147559ee92dc902f2eff76"} Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.754664 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.755615 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.755658 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.755672 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.756713 4632 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="0d2c4b50f55f202752999be170a734f2bf6c1c7059177bc5c70bf32b3bbe3ea3" exitCode=0 Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.756763 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"0d2c4b50f55f202752999be170a734f2bf6c1c7059177bc5c70bf32b3bbe3ea3"} Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.756779 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"e7c025e6a647829ca121b59959be3dbdd420010376421c59ae4fa06244a7cca9"} Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.756844 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.757376 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.758940 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.758963 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.758976 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.758983 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.759006 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.759052 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.759971 4632 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="4fbf8fb6184a7243098cb443f0531f3cab7aacfe935f4f139e91194c07945e6a" exitCode=0 Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.760025 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"4fbf8fb6184a7243098cb443f0531f3cab7aacfe935f4f139e91194c07945e6a"} Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.760048 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"12684ca85d3bdc02d6a318646b1d9440696efa291693f5197c219a9873b0356f"} Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.760527 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.761288 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.761318 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.761329 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.762377 4632 generic.go:334] "Generic (PLEG): container finished" 
podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="b7314d923bb8f2b21d40955c1aa5a2aa630a6916d05b64e4919394018db6bd4b" exitCode=0 Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.762406 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"b7314d923bb8f2b21d40955c1aa5a2aa630a6916d05b64e4919394018db6bd4b"} Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.762422 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"148fbfc04a58648eecd4c8b7028d8958bc30aeb9c5ae37d364545c5fe467baad"} Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.762475 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:11 crc kubenswrapper[4632]: W1201 06:43:11.762979 4632 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 192.168.26.49:6443: connect: connection refused Dec 01 06:43:11 crc kubenswrapper[4632]: E1201 06:43:11.763061 4632 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 192.168.26.49:6443: connect: connection refused" logger="UnhandledError" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.765559 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.765583 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:11 crc kubenswrapper[4632]: I1201 06:43:11.765592 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:11 crc kubenswrapper[4632]: W1201 06:43:11.869877 4632 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 192.168.26.49:6443: connect: connection refused Dec 01 06:43:11 crc kubenswrapper[4632]: E1201 06:43:11.869982 4632 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 192.168.26.49:6443: connect: connection refused" logger="UnhandledError" Dec 01 06:43:11 crc kubenswrapper[4632]: E1201 06:43:11.978449 4632 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 192.168.26.49:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187d0454abf5375c default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-01 06:43:10.70784086 +0000 UTC 
m=+0.272853832,LastTimestamp:2025-12-01 06:43:10.70784086 +0000 UTC m=+0.272853832,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 01 06:43:12 crc kubenswrapper[4632]: E1201 06:43:12.116420 4632 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.49:6443: connect: connection refused" interval="1.6s" Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.305045 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.305996 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.306029 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.306040 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.306061 4632 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 01 06:43:12 crc kubenswrapper[4632]: E1201 06:43:12.306435 4632 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.26.49:6443: connect: connection refused" node="crc" Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.766206 4632 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="8001a593b9645f20604d901f2c917a86ea61544de553eeb8a76e69ca3235a9e7" exitCode=0 Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.766287 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"8001a593b9645f20604d901f2c917a86ea61544de553eeb8a76e69ca3235a9e7"} Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.766466 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.767242 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.767272 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.767281 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.767720 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"50d96c5ef61204ce6f65f27235cb5073bc6fc18c20aaa694143cfd9d32c8631f"} Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.767810 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.768429 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:12 
crc kubenswrapper[4632]: I1201 06:43:12.768451 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.768459 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.770228 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"0a708cf163fc7fd531bc9170e4a5404a22536fffe4ae2755d6b3d4c12ba7ad72"} Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.770253 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c6c8a78975e59379ff4a52113b032b96ed7c36cd5005ec56b352ed5c700f2df7"} Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.770264 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"872dd39ba608722c008353bfe8e84990163f105ea8fca770893d2b35c63d51d9"} Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.770317 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.770866 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.770885 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.770904 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.772216 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32"} Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.772240 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc"} Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.772249 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69"} Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.772307 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.772758 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.772777 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 
06:43:12.772785 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.775997 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406"} Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.776021 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0"} Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.776033 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b"} Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.776042 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe"} Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.776049 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1"} Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.776116 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.776612 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.776634 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:12 crc kubenswrapper[4632]: I1201 06:43:12.776642 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:13 crc kubenswrapper[4632]: I1201 06:43:13.779510 4632 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="9811922604b7c9c3d16684362ff5029b692e21f4aa234a1f77131dcade7b1baf" exitCode=0 Dec 01 06:43:13 crc kubenswrapper[4632]: I1201 06:43:13.779581 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"9811922604b7c9c3d16684362ff5029b692e21f4aa234a1f77131dcade7b1baf"} Dec 01 06:43:13 crc kubenswrapper[4632]: I1201 06:43:13.779608 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:13 crc kubenswrapper[4632]: I1201 06:43:13.779682 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:13 crc kubenswrapper[4632]: I1201 06:43:13.780324 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:13 crc kubenswrapper[4632]: I1201 06:43:13.780349 4632 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:13 crc kubenswrapper[4632]: I1201 06:43:13.780373 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:13 crc kubenswrapper[4632]: I1201 06:43:13.780391 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:13 crc kubenswrapper[4632]: I1201 06:43:13.780400 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:13 crc kubenswrapper[4632]: I1201 06:43:13.780377 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:13 crc kubenswrapper[4632]: I1201 06:43:13.907164 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:13 crc kubenswrapper[4632]: I1201 06:43:13.907883 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:13 crc kubenswrapper[4632]: I1201 06:43:13.907921 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:13 crc kubenswrapper[4632]: I1201 06:43:13.907930 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:13 crc kubenswrapper[4632]: I1201 06:43:13.907948 4632 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 01 06:43:14 crc kubenswrapper[4632]: I1201 06:43:14.784752 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7009150a923be5d8fa956d16f8538d6229318b2187c19c8063b6ef0279a2e716"} Dec 01 06:43:14 crc kubenswrapper[4632]: I1201 06:43:14.784788 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"07a44b45a108c7863728a8fb8bd0e37ce8fd37af4d2e4afb9e032e059c1e981d"} Dec 01 06:43:14 crc kubenswrapper[4632]: I1201 06:43:14.784798 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"e573f82e576ea344fc02cc74cba66b722e8e1e7402197f169f3bdb0f575f9231"} Dec 01 06:43:14 crc kubenswrapper[4632]: I1201 06:43:14.784807 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"205d7d446d5e154ed8ace14bee0d57ce0659c4ba33106a48c49fb1048ffd0018"} Dec 01 06:43:14 crc kubenswrapper[4632]: I1201 06:43:14.784814 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"fe9f3c85414e07c1ece6673f05f3fff0a0ff5b03db1c85b6ec24b3945ca5f8e0"} Dec 01 06:43:14 crc kubenswrapper[4632]: I1201 06:43:14.784892 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:14 crc kubenswrapper[4632]: I1201 06:43:14.785451 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:14 crc kubenswrapper[4632]: I1201 06:43:14.785471 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 
06:43:14 crc kubenswrapper[4632]: I1201 06:43:14.785479 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:14 crc kubenswrapper[4632]: I1201 06:43:14.991924 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 01 06:43:15 crc kubenswrapper[4632]: I1201 06:43:15.141668 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:43:15 crc kubenswrapper[4632]: I1201 06:43:15.141792 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:15 crc kubenswrapper[4632]: I1201 06:43:15.142519 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:15 crc kubenswrapper[4632]: I1201 06:43:15.142555 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:15 crc kubenswrapper[4632]: I1201 06:43:15.142573 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:15 crc kubenswrapper[4632]: I1201 06:43:15.787012 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:15 crc kubenswrapper[4632]: I1201 06:43:15.788378 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:15 crc kubenswrapper[4632]: I1201 06:43:15.788404 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:15 crc kubenswrapper[4632]: I1201 06:43:15.788412 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:16 crc kubenswrapper[4632]: I1201 06:43:16.373235 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 06:43:16 crc kubenswrapper[4632]: I1201 06:43:16.373393 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:16 crc kubenswrapper[4632]: I1201 06:43:16.374190 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:16 crc kubenswrapper[4632]: I1201 06:43:16.374223 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:16 crc kubenswrapper[4632]: I1201 06:43:16.374233 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:16 crc kubenswrapper[4632]: I1201 06:43:16.584264 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 01 06:43:16 crc kubenswrapper[4632]: I1201 06:43:16.788777 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:16 crc kubenswrapper[4632]: I1201 06:43:16.789426 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:16 crc kubenswrapper[4632]: I1201 06:43:16.789451 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:16 crc kubenswrapper[4632]: I1201 06:43:16.789459 4632 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:17 crc kubenswrapper[4632]: I1201 06:43:17.123231 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:43:17 crc kubenswrapper[4632]: I1201 06:43:17.123349 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:17 crc kubenswrapper[4632]: I1201 06:43:17.124053 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:17 crc kubenswrapper[4632]: I1201 06:43:17.124079 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:17 crc kubenswrapper[4632]: I1201 06:43:17.124087 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:17 crc kubenswrapper[4632]: I1201 06:43:17.387499 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:43:17 crc kubenswrapper[4632]: I1201 06:43:17.415999 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 06:43:17 crc kubenswrapper[4632]: I1201 06:43:17.416089 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:17 crc kubenswrapper[4632]: I1201 06:43:17.416836 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:17 crc kubenswrapper[4632]: I1201 06:43:17.416864 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:17 crc kubenswrapper[4632]: I1201 06:43:17.416874 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:17 crc kubenswrapper[4632]: I1201 06:43:17.790713 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:17 crc kubenswrapper[4632]: I1201 06:43:17.791411 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:17 crc kubenswrapper[4632]: I1201 06:43:17.791446 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:17 crc kubenswrapper[4632]: I1201 06:43:17.791454 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:18 crc kubenswrapper[4632]: I1201 06:43:18.113162 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 06:43:18 crc kubenswrapper[4632]: I1201 06:43:18.113255 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:18 crc kubenswrapper[4632]: I1201 06:43:18.114098 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:18 crc kubenswrapper[4632]: I1201 06:43:18.114128 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:18 crc kubenswrapper[4632]: I1201 06:43:18.114138 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 
06:43:18 crc kubenswrapper[4632]: I1201 06:43:18.338112 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 06:43:18 crc kubenswrapper[4632]: I1201 06:43:18.341798 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 06:43:18 crc kubenswrapper[4632]: I1201 06:43:18.792885 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:18 crc kubenswrapper[4632]: I1201 06:43:18.795387 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:18 crc kubenswrapper[4632]: I1201 06:43:18.795443 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:18 crc kubenswrapper[4632]: I1201 06:43:18.795456 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:19 crc kubenswrapper[4632]: I1201 06:43:19.795133 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:19 crc kubenswrapper[4632]: I1201 06:43:19.795889 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:19 crc kubenswrapper[4632]: I1201 06:43:19.795916 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:19 crc kubenswrapper[4632]: I1201 06:43:19.795924 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:20 crc kubenswrapper[4632]: E1201 06:43:20.804688 4632 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 01 06:43:21 crc kubenswrapper[4632]: I1201 06:43:21.113407 4632 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 01 06:43:21 crc kubenswrapper[4632]: I1201 06:43:21.113458 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 01 06:43:21 crc kubenswrapper[4632]: I1201 06:43:21.562774 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 06:43:21 crc kubenswrapper[4632]: I1201 06:43:21.562929 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:21 crc kubenswrapper[4632]: I1201 06:43:21.564699 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:21 crc kubenswrapper[4632]: I1201 06:43:21.564741 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:21 crc 
kubenswrapper[4632]: I1201 06:43:21.564751 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:22 crc kubenswrapper[4632]: I1201 06:43:22.710811 4632 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 01 06:43:23 crc kubenswrapper[4632]: W1201 06:43:23.137808 4632 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 01 06:43:23 crc kubenswrapper[4632]: I1201 06:43:23.137879 4632 trace.go:236] Trace[1230051709]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Dec-2025 06:43:13.136) (total time: 10001ms): Dec 01 06:43:23 crc kubenswrapper[4632]: Trace[1230051709]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (06:43:23.137) Dec 01 06:43:23 crc kubenswrapper[4632]: Trace[1230051709]: [10.001219026s] [10.001219026s] END Dec 01 06:43:23 crc kubenswrapper[4632]: E1201 06:43:23.137902 4632 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 01 06:43:23 crc kubenswrapper[4632]: W1201 06:43:23.475151 4632 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 01 06:43:23 crc kubenswrapper[4632]: I1201 06:43:23.475219 4632 trace.go:236] Trace[1432239951]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Dec-2025 06:43:13.473) (total time: 10001ms): Dec 01 06:43:23 crc kubenswrapper[4632]: Trace[1432239951]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (06:43:23.475) Dec 01 06:43:23 crc kubenswrapper[4632]: Trace[1432239951]: [10.0016015s] [10.0016015s] END Dec 01 06:43:23 crc kubenswrapper[4632]: E1201 06:43:23.475234 4632 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 01 06:43:23 crc kubenswrapper[4632]: E1201 06:43:23.717794 4632 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="3.2s" Dec 01 06:43:23 crc kubenswrapper[4632]: I1201 06:43:23.770457 4632 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" 
start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 01 06:43:23 crc kubenswrapper[4632]: I1201 06:43:23.770514 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 01 06:43:23 crc kubenswrapper[4632]: I1201 06:43:23.773303 4632 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 01 06:43:23 crc kubenswrapper[4632]: I1201 06:43:23.773363 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 01 06:43:25 crc kubenswrapper[4632]: I1201 06:43:25.008430 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 01 06:43:25 crc kubenswrapper[4632]: I1201 06:43:25.008562 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:25 crc kubenswrapper[4632]: I1201 06:43:25.009341 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:25 crc kubenswrapper[4632]: I1201 06:43:25.009387 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:25 crc kubenswrapper[4632]: I1201 06:43:25.009397 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:25 crc kubenswrapper[4632]: I1201 06:43:25.016827 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 01 06:43:25 crc kubenswrapper[4632]: I1201 06:43:25.803830 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:25 crc kubenswrapper[4632]: I1201 06:43:25.804465 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:25 crc kubenswrapper[4632]: I1201 06:43:25.804495 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:25 crc kubenswrapper[4632]: I1201 06:43:25.804504 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.376593 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.376715 4632 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.377569 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:26 crc 
kubenswrapper[4632]: I1201 06:43:26.377599 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.377629 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.618185 4632 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.710079 4632 apiserver.go:52] "Watching apiserver" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.713201 4632 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.713407 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c"] Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.713604 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.713700 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:43:26 crc kubenswrapper[4632]: E1201 06:43:26.713830 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.714087 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.714095 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:43:26 crc kubenswrapper[4632]: E1201 06:43:26.714139 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.714089 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 06:43:26 crc kubenswrapper[4632]: E1201 06:43:26.714190 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.714094 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.715663 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.715835 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.715896 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.715914 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.715896 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.716042 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.716611 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.716817 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.716835 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.733790 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.741199 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.747834 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.755078 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.761582 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.767506 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.774315 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:26 crc kubenswrapper[4632]: I1201 06:43:26.813856 4632 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 01 06:43:27 crc kubenswrapper[4632]: I1201 06:43:27.126536 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:43:27 crc kubenswrapper[4632]: I1201 06:43:27.129761 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:43:27 crc kubenswrapper[4632]: I1201 06:43:27.132980 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:27 crc kubenswrapper[4632]: I1201 06:43:27.135133 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 01 06:43:27 crc kubenswrapper[4632]: I1201 06:43:27.139243 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:27 crc kubenswrapper[4632]: I1201 06:43:27.145683 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:27 crc kubenswrapper[4632]: I1201 06:43:27.151559 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:27 crc kubenswrapper[4632]: I1201 06:43:27.157945 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:27 crc kubenswrapper[4632]: I1201 06:43:27.163388 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:27 crc kubenswrapper[4632]: I1201 06:43:27.169416 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:27 crc kubenswrapper[4632]: I1201 06:43:27.175808 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:27 crc kubenswrapper[4632]: I1201 06:43:27.181410 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:27 crc kubenswrapper[4632]: I1201 06:43:27.187132 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:27 crc kubenswrapper[4632]: I1201 06:43:27.192526 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:27 crc kubenswrapper[4632]: I1201 06:43:27.198000 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:27 crc kubenswrapper[4632]: I1201 06:43:27.203766 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 01 06:43:27 crc kubenswrapper[4632]: I1201 06:43:27.797035 4632 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Dec 01 06:43:27 crc kubenswrapper[4632]: I1201 06:43:27.806804 4632 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 01 06:43:27 crc kubenswrapper[4632]: E1201 06:43:27.811083 4632 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-apiserver-crc\" already exists" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.749580 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.749665 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:43:28 crc kubenswrapper[4632]: E1201 06:43:28.749695 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.749592 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:43:28 crc kubenswrapper[4632]: E1201 06:43:28.749771 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 06:43:28 crc kubenswrapper[4632]: E1201 06:43:28.749834 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.773090 4632 trace.go:236] Trace[785014604]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Dec-2025 06:43:14.747) (total time: 14025ms):
Dec 01 06:43:28 crc kubenswrapper[4632]: Trace[785014604]: ---"Objects listed" error: 14025ms (06:43:28.773)
Dec 01 06:43:28 crc kubenswrapper[4632]: Trace[785014604]: [14.02575109s] [14.02575109s] END
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.773105 4632 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.773957 4632 trace.go:236] Trace[1028275292]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (01-Dec-2025 06:43:14.761) (total time: 14011ms):
Dec 01 06:43:28 crc kubenswrapper[4632]: Trace[1028275292]: ---"Objects listed" error: 14011ms (06:43:28.773)
Dec 01 06:43:28 crc kubenswrapper[4632]: Trace[1028275292]: [14.01195809s] [14.01195809s] END
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.773983 4632 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Dec 01 06:43:28 crc kubenswrapper[4632]: E1201 06:43:28.774314 4632 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.774636 4632 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.785753 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.794657 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.802917 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.810159 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.816793 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.823169 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.829601 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.845560 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875091 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875125 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875157 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875173 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875193 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875208 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875221 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875235 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875249 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875263 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875283 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875297 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875311 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875327 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875341 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875374 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875392 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875408 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875417 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875435 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875477 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875498 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875514 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875529 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875543 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875538 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875558 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875610 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875631 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875647 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875662 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875677 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875694 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875687 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875708 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875710 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875723 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875738 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875752 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875768 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875783 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875797 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875814 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875828 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875832 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875846 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875860 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875841 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875893 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875909 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875924 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875938 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875953 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875967 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875997 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.875997 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876011 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876028 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876041 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876057 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876070 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876083 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876097 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876111 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876125 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876135 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876144 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876160 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876175 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876189 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876203 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876216 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876231 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876245 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876258 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876273 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876290 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876304 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876318 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876331 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876346 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876379 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876395 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876410 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876426 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876440 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876467 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876482 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876495 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876510 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876524 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876539 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876553 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876568 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876583 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876599 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876613 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876627 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876644 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876659 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876674 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876743 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876762 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876777 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876792 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876806 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876820 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876834 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876848 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876863 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876876 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876890 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876904 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876917 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876935 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876953 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876969 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876999 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877014 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877030 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") "
Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877044 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName:
\"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877059 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877073 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877087 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877101 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877115 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877130 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877144 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877158 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877172 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877186 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" 
(UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877201 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877216 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877230 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877244 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877259 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877274 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877288 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877303 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877318 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877331 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877346 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877383 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877398 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877414 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877429 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877445 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877461 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877476 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877491 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877506 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877521 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877535 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877550 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877568 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877582 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877598 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877612 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877626 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877640 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877655 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877670 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877685 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877699 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877715 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877730 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877745 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877761 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877776 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877790 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877805 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877820 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877836 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877851 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877866 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877881 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877895 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877909 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877924 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877938 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877954 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877969 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878000 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878016 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878031 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878046 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878064 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878081 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878097 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878116 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 01 
06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878134 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878150 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878167 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878183 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878199 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878215 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878230 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878245 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878261 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878276 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: 
\"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878293 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878309 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878329 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878346 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878422 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878446 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878466 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878482 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878501 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: 
\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878519 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878539 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878554 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878570 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878587 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878603 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878619 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878634 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878650 4632 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878690 4632 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878702 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878711 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878720 4632 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878729 4632 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878738 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878747 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878757 4632 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878766 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876155 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876237 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876291 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876426 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876520 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876544 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876601 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876679 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876708 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876908 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876940 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.876967 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877044 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877206 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877342 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877392 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877394 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). 
InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877498 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877532 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877531 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877626 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877632 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877660 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877669 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877782 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877794 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877799 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877814 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877917 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877929 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.877947 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878008 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878059 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878085 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878282 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878288 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878343 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878448 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878480 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878480 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878523 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878545 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878650 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878699 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878766 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878805 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.878814 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). 
InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.879458 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.886735 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.879656 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.879749 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.879988 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.880712 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.881118 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.881161 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.881165 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.881179 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.881443 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.881552 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.881630 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.881704 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.881711 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.882099 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.882109 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.882276 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.882313 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.882339 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.882420 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.882448 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.882347 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.882492 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.882582 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.882709 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.882767 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.882773 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.882820 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: E1201 06:43:28.883118 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:43:29.382843346 +0000 UTC m=+18.947856319 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.883255 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.883264 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.883333 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.883420 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.883495 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.885366 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.885589 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.885676 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.885815 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.886073 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.886019 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.886224 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.886321 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.886333 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.886336 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). 
InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.886705 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.886741 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.886838 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: E1201 06:43:28.886863 4632 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.887116 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.887131 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.887202 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.887507 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.887654 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.887740 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.887859 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.888168 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.888267 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.888277 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.888275 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.888389 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). 
InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.888514 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.888574 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.888616 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.888633 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.887833 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.887946 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.888028 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.888829 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.888811 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.888958 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.889058 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.889079 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.889090 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.889149 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.889181 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.889249 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.889476 4632 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.889617 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.889072 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.890185 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.890441 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.890570 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: E1201 06:43:28.890589 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:43:29.390570005 +0000 UTC m=+18.955582977 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.890625 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.888846 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.890691 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.890716 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.890937 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.890957 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.891139 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.891280 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.891316 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.891495 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.891677 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: E1201 06:43:28.891823 4632 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.891838 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: E1201 06:43:28.891920 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:43:29.391892426 +0000 UTC m=+18.956905399 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.892040 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.892455 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.892549 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.892672 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.892689 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.892754 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.892769 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.892808 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.893059 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.893069 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.893427 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.893674 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.895412 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.896000 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.896007 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.896317 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.896618 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.897601 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.897866 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.898266 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.898540 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.898589 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.898705 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.898889 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.899397 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.899640 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: E1201 06:43:28.899869 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 06:43:28 crc kubenswrapper[4632]: E1201 06:43:28.899886 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 06:43:28 crc kubenswrapper[4632]: E1201 06:43:28.899898 4632 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.900414 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: E1201 06:43:28.900466 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 06:43:29.400452505 +0000 UTC m=+18.965465478 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.900519 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.900668 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 06:43:28 crc kubenswrapper[4632]: E1201 06:43:28.900810 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 06:43:28 crc kubenswrapper[4632]: E1201 06:43:28.900827 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 06:43:28 crc kubenswrapper[4632]: E1201 06:43:28.900837 4632 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:43:28 crc kubenswrapper[4632]: E1201 06:43:28.900868 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 06:43:29.40085807 +0000 UTC m=+18.965871042 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.901596 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.901773 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.904225 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.904409 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.904738 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.907377 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.907448 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.907522 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.907641 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). 
InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.907663 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.907673 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.907857 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.907930 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.907944 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.908117 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.908256 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.908266 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.908455 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.908157 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.909369 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.909399 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.916853 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.922827 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.926399 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.929482 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.979799 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.979837 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.979893 4632 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.979902 4632 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.979911 4632 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.979918 4632 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.979926 4632 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.979925 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.979934 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980087 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: 
\"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980098 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980106 4632 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980115 4632 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980123 4632 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980131 4632 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980139 4632 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980147 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980155 4632 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980162 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980172 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980180 4632 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980187 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980196 4632 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980203 4632 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980210 4632 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980218 4632 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980225 4632 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980232 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980240 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980248 4632 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980255 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980263 4632 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980271 4632 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980278 4632 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980286 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980293 4632 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" 
DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980300 4632 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980307 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980316 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980323 4632 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980332 4632 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980339 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980347 4632 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980373 4632 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980381 4632 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980388 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980397 4632 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980405 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980412 4632 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc 
kubenswrapper[4632]: I1201 06:43:28.980419 4632 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980426 4632 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980434 4632 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980441 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980449 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980457 4632 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980465 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980474 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980482 4632 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980489 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980497 4632 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980505 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980513 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: 
\"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980520 4632 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980528 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980536 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980545 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980552 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980561 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980569 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980576 4632 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980583 4632 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980591 4632 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980597 4632 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980605 4632 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980612 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" 
(UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980620 4632 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980627 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980635 4632 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980643 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980651 4632 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980658 4632 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980666 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980674 4632 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980681 4632 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980688 4632 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980696 4632 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980703 4632 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980710 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" 
(UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980718 4632 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980725 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980732 4632 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980739 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980746 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980753 4632 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980761 4632 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980769 4632 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980777 4632 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980784 4632 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980792 4632 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980800 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980807 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: 
\"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980815 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980822 4632 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980829 4632 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980837 4632 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980844 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980851 4632 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980858 4632 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980865 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980872 4632 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980879 4632 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980888 4632 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980895 4632 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980903 4632 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: 
\"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980910 4632 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980917 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980924 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980932 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980940 4632 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980947 4632 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.980955 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981075 4632 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981087 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981097 4632 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981106 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981142 4632 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981151 4632 reconciler_common.go:293] "Volume detached for volume 
\"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981162 4632 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981176 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981184 4632 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981192 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981199 4632 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981208 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981215 4632 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981222 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981230 4632 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981236 4632 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981244 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981251 4632 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981259 4632 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981266 4632 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981275 4632 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981324 4632 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981370 4632 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.979967 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981459 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981492 4632 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981504 4632 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981512 4632 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981520 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981528 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981535 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: 
\"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981543 4632 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981550 4632 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981557 4632 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981566 4632 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981573 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981582 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981590 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981597 4632 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981604 4632 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981612 4632 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981626 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981633 4632 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981640 4632 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" 
(UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981648 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981655 4632 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981662 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981670 4632 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981677 4632 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981684 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981690 4632 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981697 4632 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981704 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981711 4632 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981717 4632 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981725 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981732 4632 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981739 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981746 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981754 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981761 4632 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981767 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981774 4632 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981783 4632 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981790 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981797 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981804 4632 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:28 crc kubenswrapper[4632]: I1201 06:43:28.981811 4632 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.125373 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.129683 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.134562 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 01 06:43:29 crc kubenswrapper[4632]: W1201 06:43:29.135205 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-a275f3f1b72bda67e08fc1ec5fba646df95b4f0aa2251e32775fe30cd839f1e7 WatchSource:0}: Error finding container a275f3f1b72bda67e08fc1ec5fba646df95b4f0aa2251e32775fe30cd839f1e7: Status 404 returned error can't find the container with id a275f3f1b72bda67e08fc1ec5fba646df95b4f0aa2251e32775fe30cd839f1e7 Dec 01 06:43:29 crc kubenswrapper[4632]: W1201 06:43:29.139320 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-77a3501d017a70f27ede8a43f00ee927180fc9ab44d5b894948812848b082e52 WatchSource:0}: Error finding container 77a3501d017a70f27ede8a43f00ee927180fc9ab44d5b894948812848b082e52: Status 404 returned error can't find the container with id 77a3501d017a70f27ede8a43f00ee927180fc9ab44d5b894948812848b082e52 Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.319194 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.322893 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.324891 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.326055 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.332858 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.339690 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.346540 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.353069 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.360061 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.367289 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\
\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.373313 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.379915 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.385464 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:43:29 crc kubenswrapper[4632]: E1201 06:43:29.385605 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:43:30.38558746 +0000 UTC m=+19.950600433 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.387378 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc4782
74c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.394011 4632 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d0
36a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.401158 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.407679 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.413722 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.420529 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.485944 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.485992 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.486019 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.486038 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:43:29 crc kubenswrapper[4632]: E1201 06:43:29.486100 4632 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 06:43:29 crc kubenswrapper[4632]: E1201 06:43:29.486117 4632 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:43:29 crc kubenswrapper[4632]: E1201 06:43:29.486123 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 06:43:29 crc kubenswrapper[4632]: E1201 06:43:29.486167 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 06:43:29 crc 
kubenswrapper[4632]: E1201 06:43:29.486174 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 06:43:29 crc kubenswrapper[4632]: E1201 06:43:29.486177 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:43:30.486154538 +0000 UTC m=+20.051167511 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 06:43:29 crc kubenswrapper[4632]: E1201 06:43:29.486184 4632 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:43:29 crc kubenswrapper[4632]: E1201 06:43:29.486196 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:43:30.48618809 +0000 UTC m=+20.051201064 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:43:29 crc kubenswrapper[4632]: E1201 06:43:29.486178 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 06:43:29 crc kubenswrapper[4632]: E1201 06:43:29.486209 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 06:43:30.486204111 +0000 UTC m=+20.051217084 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:43:29 crc kubenswrapper[4632]: E1201 06:43:29.486209 4632 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:43:29 crc kubenswrapper[4632]: E1201 06:43:29.486235 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 06:43:30.486223938 +0000 UTC m=+20.051236911 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.811324 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3"} Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.811382 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"a275f3f1b72bda67e08fc1ec5fba646df95b4f0aa2251e32775fe30cd839f1e7"} Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.812850 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354"} Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.812880 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60"} Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.812891 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"0cd1fc49f067aa830bc90f90debf65f9537cc299a6ad23d103bfbf25ab26dfd5"} Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.814444 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" 
event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"77a3501d017a70f27ede8a43f00ee927180fc9ab44d5b894948812848b082e52"} Dec 01 06:43:29 crc kubenswrapper[4632]: E1201 06:43:29.818134 4632 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.822865 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:29Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.832492 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:29Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.842500 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\
\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:29Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.850341 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578b
c18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:29Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.859438 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:29Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.866821 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:29Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.877284 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:29Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.885130 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:29Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.892284 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:29Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.902035 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMount
s\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:29Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.913227 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\"
:\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod 
\"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:29Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.928452 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:29Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.945293 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:29Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.958776 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:29Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.970857 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:29Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:29 crc kubenswrapper[4632]: I1201 06:43:29.979516 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:29Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.168635 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-gm9xs"] Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.168985 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-49pcd"] Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.169145 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.169158 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-49pcd" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.170313 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.170605 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.172270 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.172387 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.172786 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.172844 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.173243 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.173244 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.181549 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.211084 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.221437 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.229711 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.238256 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.246709 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.254740 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.262459 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.270844 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.278334 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.286528 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.293171 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/168bb8aa-8b43-44df-836f-90d6d52f1539-mcd-auth-proxy-config\") pod \"machine-config-daemon-gm9xs\" (UID: \"168bb8aa-8b43-44df-836f-90d6d52f1539\") " pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.293292 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftxfc\" (UniqueName: \"kubernetes.io/projected/168bb8aa-8b43-44df-836f-90d6d52f1539-kube-api-access-ftxfc\") pod \"machine-config-daemon-gm9xs\" (UID: \"168bb8aa-8b43-44df-836f-90d6d52f1539\") " pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.293417 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/168bb8aa-8b43-44df-836f-90d6d52f1539-proxy-tls\") pod \"machine-config-daemon-gm9xs\" (UID: \"168bb8aa-8b43-44df-836f-90d6d52f1539\") " pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.293499 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxnk7\" (UniqueName: \"kubernetes.io/projected/0c73ddd8-a673-4565-a30b-5b4d5b708edd-kube-api-access-pxnk7\") pod \"node-resolver-49pcd\" (UID: \"0c73ddd8-a673-4565-a30b-5b4d5b708edd\") " pod="openshift-dns/node-resolver-49pcd" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.293574 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/168bb8aa-8b43-44df-836f-90d6d52f1539-rootfs\") pod \"machine-config-daemon-gm9xs\" (UID: \"168bb8aa-8b43-44df-836f-90d6d52f1539\") " pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.293635 4632 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/0c73ddd8-a673-4565-a30b-5b4d5b708edd-hosts-file\") pod \"node-resolver-49pcd\" (UID: \"0c73ddd8-a673-4565-a30b-5b4d5b708edd\") " pod="openshift-dns/node-resolver-49pcd" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.296934 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.304375 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.314322 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.324516 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.331709 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.342164 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.353124 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.365477 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.373582 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.394817 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.394880 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/168bb8aa-8b43-44df-836f-90d6d52f1539-proxy-tls\") pod \"machine-config-daemon-gm9xs\" (UID: \"168bb8aa-8b43-44df-836f-90d6d52f1539\") " pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.394897 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxnk7\" (UniqueName: \"kubernetes.io/projected/0c73ddd8-a673-4565-a30b-5b4d5b708edd-kube-api-access-pxnk7\") pod \"node-resolver-49pcd\" (UID: 
\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\") " pod="openshift-dns/node-resolver-49pcd" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.394917 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/168bb8aa-8b43-44df-836f-90d6d52f1539-rootfs\") pod \"machine-config-daemon-gm9xs\" (UID: \"168bb8aa-8b43-44df-836f-90d6d52f1539\") " pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.394931 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/0c73ddd8-a673-4565-a30b-5b4d5b708edd-hosts-file\") pod \"node-resolver-49pcd\" (UID: \"0c73ddd8-a673-4565-a30b-5b4d5b708edd\") " pod="openshift-dns/node-resolver-49pcd" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.394951 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/168bb8aa-8b43-44df-836f-90d6d52f1539-mcd-auth-proxy-config\") pod \"machine-config-daemon-gm9xs\" (UID: \"168bb8aa-8b43-44df-836f-90d6d52f1539\") " pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.394982 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftxfc\" (UniqueName: \"kubernetes.io/projected/168bb8aa-8b43-44df-836f-90d6d52f1539-kube-api-access-ftxfc\") pod \"machine-config-daemon-gm9xs\" (UID: \"168bb8aa-8b43-44df-836f-90d6d52f1539\") " pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" Dec 01 06:43:30 crc kubenswrapper[4632]: E1201 06:43:30.395220 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:43:32.395207193 +0000 UTC m=+21.960220166 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.395219 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/0c73ddd8-a673-4565-a30b-5b4d5b708edd-hosts-file\") pod \"node-resolver-49pcd\" (UID: \"0c73ddd8-a673-4565-a30b-5b4d5b708edd\") " pod="openshift-dns/node-resolver-49pcd" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.395245 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/168bb8aa-8b43-44df-836f-90d6d52f1539-rootfs\") pod \"machine-config-daemon-gm9xs\" (UID: \"168bb8aa-8b43-44df-836f-90d6d52f1539\") " pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.395759 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/168bb8aa-8b43-44df-836f-90d6d52f1539-mcd-auth-proxy-config\") pod \"machine-config-daemon-gm9xs\" (UID: \"168bb8aa-8b43-44df-836f-90d6d52f1539\") " pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.400658 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/168bb8aa-8b43-44df-836f-90d6d52f1539-proxy-tls\") pod \"machine-config-daemon-gm9xs\" (UID: \"168bb8aa-8b43-44df-836f-90d6d52f1539\") " pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.406623 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxnk7\" (UniqueName: \"kubernetes.io/projected/0c73ddd8-a673-4565-a30b-5b4d5b708edd-kube-api-access-pxnk7\") pod \"node-resolver-49pcd\" (UID: \"0c73ddd8-a673-4565-a30b-5b4d5b708edd\") " pod="openshift-dns/node-resolver-49pcd" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.410786 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftxfc\" (UniqueName: \"kubernetes.io/projected/168bb8aa-8b43-44df-836f-90d6d52f1539-kube-api-access-ftxfc\") pod \"machine-config-daemon-gm9xs\" (UID: \"168bb8aa-8b43-44df-836f-90d6d52f1539\") " pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.477832 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.485765 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-49pcd" Dec 01 06:43:30 crc kubenswrapper[4632]: W1201 06:43:30.486423 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod168bb8aa_8b43_44df_836f_90d6d52f1539.slice/crio-e38e07cbf8aea7899796200e999ae82ec907f10d6c3acbee9610bf7a952445a7 WatchSource:0}: Error finding container e38e07cbf8aea7899796200e999ae82ec907f10d6c3acbee9610bf7a952445a7: Status 404 returned error can't find the container with id e38e07cbf8aea7899796200e999ae82ec907f10d6c3acbee9610bf7a952445a7 Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.495815 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.495879 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.495901 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.495922 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:43:30 crc kubenswrapper[4632]: E1201 06:43:30.495988 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 06:43:30 crc kubenswrapper[4632]: E1201 06:43:30.496012 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 06:43:30 crc kubenswrapper[4632]: E1201 06:43:30.496015 4632 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:43:30 crc kubenswrapper[4632]: E1201 06:43:30.496022 4632 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:43:30 crc kubenswrapper[4632]: E1201 06:43:30.496029 4632 projected.go:288] Couldn't get configMap 
openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 06:43:30 crc kubenswrapper[4632]: E1201 06:43:30.496035 4632 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 06:43:30 crc kubenswrapper[4632]: E1201 06:43:30.496059 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:43:32.496047875 +0000 UTC m=+22.061060849 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:43:30 crc kubenswrapper[4632]: E1201 06:43:30.496074 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 06:43:32.496068774 +0000 UTC m=+22.061081748 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:43:30 crc kubenswrapper[4632]: E1201 06:43:30.496049 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 06:43:30 crc kubenswrapper[4632]: E1201 06:43:30.496097 4632 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:43:30 crc kubenswrapper[4632]: E1201 06:43:30.496108 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:43:32.496093161 +0000 UTC m=+22.061106135 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 06:43:30 crc kubenswrapper[4632]: E1201 06:43:30.496127 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-12-01 06:43:32.496121184 +0000 UTC m=+22.061134157 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.542133 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-rs4h5"] Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.542635 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.546343 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.546480 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.546525 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.546593 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.546765 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.555857 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.571150 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.581404 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.594520 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.609555 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.619878 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.634715 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-de
v/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.644734 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\
"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\
\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.662057 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.671847 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container 
could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.680862 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.698116 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzg5n\" (UniqueName: \"kubernetes.io/projected/c2ef6994-166c-4195-af3f-a55124a4c441-kube-api-access-qzg5n\") pod \"multus-additional-cni-plugins-rs4h5\" (UID: \"c2ef6994-166c-4195-af3f-a55124a4c441\") " pod="openshift-multus/multus-additional-cni-plugins-rs4h5" Dec 01 
06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.698143 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c2ef6994-166c-4195-af3f-a55124a4c441-system-cni-dir\") pod \"multus-additional-cni-plugins-rs4h5\" (UID: \"c2ef6994-166c-4195-af3f-a55124a4c441\") " pod="openshift-multus/multus-additional-cni-plugins-rs4h5" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.698158 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/c2ef6994-166c-4195-af3f-a55124a4c441-os-release\") pod \"multus-additional-cni-plugins-rs4h5\" (UID: \"c2ef6994-166c-4195-af3f-a55124a4c441\") " pod="openshift-multus/multus-additional-cni-plugins-rs4h5" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.698173 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/c2ef6994-166c-4195-af3f-a55124a4c441-tuning-conf-dir\") pod \"multus-additional-cni-plugins-rs4h5\" (UID: \"c2ef6994-166c-4195-af3f-a55124a4c441\") " pod="openshift-multus/multus-additional-cni-plugins-rs4h5" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.698192 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/c2ef6994-166c-4195-af3f-a55124a4c441-cnibin\") pod \"multus-additional-cni-plugins-rs4h5\" (UID: \"c2ef6994-166c-4195-af3f-a55124a4c441\") " pod="openshift-multus/multus-additional-cni-plugins-rs4h5" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.698219 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/c2ef6994-166c-4195-af3f-a55124a4c441-cni-binary-copy\") pod \"multus-additional-cni-plugins-rs4h5\" (UID: \"c2ef6994-166c-4195-af3f-a55124a4c441\") " pod="openshift-multus/multus-additional-cni-plugins-rs4h5" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.698244 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/c2ef6994-166c-4195-af3f-a55124a4c441-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-rs4h5\" (UID: \"c2ef6994-166c-4195-af3f-a55124a4c441\") " pod="openshift-multus/multus-additional-cni-plugins-rs4h5" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.749672 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.749715 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.749744 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:43:30 crc kubenswrapper[4632]: E1201 06:43:30.749776 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:43:30 crc kubenswrapper[4632]: E1201 06:43:30.749843 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:43:30 crc kubenswrapper[4632]: E1201 06:43:30.749914 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.752515 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.752962 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.753635 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.754184 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.754699 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.755160 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.755675 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.756144 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.756683 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.757134 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" 
path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.757624 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.758198 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.758260 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.758719 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.759201 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.759671 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.760152 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.760661 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.761008 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.763940 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.764465 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.765201 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.765697 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.766079 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.766989 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.767392 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" 
path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.767686 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.768273 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.768946 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.769714 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.770232 4632 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.770958 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.771437 4632 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.771531 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.773253 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.773694 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.774047 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.775440 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.776290 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.776752 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.777619 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.778170 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.778892 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.779420 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.780274 4632 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.780824 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.781583 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.782049 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.782796 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.783399 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.783537 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.784253 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.784686 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.785431 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.785865 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.786369 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.787098 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.799303 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzg5n\" (UniqueName: \"kubernetes.io/projected/c2ef6994-166c-4195-af3f-a55124a4c441-kube-api-access-qzg5n\") pod \"multus-additional-cni-plugins-rs4h5\" (UID: \"c2ef6994-166c-4195-af3f-a55124a4c441\") " pod="openshift-multus/multus-additional-cni-plugins-rs4h5" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.799341 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c2ef6994-166c-4195-af3f-a55124a4c441-system-cni-dir\") pod \"multus-additional-cni-plugins-rs4h5\" (UID: \"c2ef6994-166c-4195-af3f-a55124a4c441\") " pod="openshift-multus/multus-additional-cni-plugins-rs4h5" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.799378 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/c2ef6994-166c-4195-af3f-a55124a4c441-os-release\") pod \"multus-additional-cni-plugins-rs4h5\" (UID: \"c2ef6994-166c-4195-af3f-a55124a4c441\") " pod="openshift-multus/multus-additional-cni-plugins-rs4h5" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.799637 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/c2ef6994-166c-4195-af3f-a55124a4c441-tuning-conf-dir\") pod \"multus-additional-cni-plugins-rs4h5\" (UID: \"c2ef6994-166c-4195-af3f-a55124a4c441\") " pod="openshift-multus/multus-additional-cni-plugins-rs4h5" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.799430 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c2ef6994-166c-4195-af3f-a55124a4c441-system-cni-dir\") pod \"multus-additional-cni-plugins-rs4h5\" (UID: \"c2ef6994-166c-4195-af3f-a55124a4c441\") " pod="openshift-multus/multus-additional-cni-plugins-rs4h5" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.799713 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/c2ef6994-166c-4195-af3f-a55124a4c441-cnibin\") pod \"multus-additional-cni-plugins-rs4h5\" (UID: \"c2ef6994-166c-4195-af3f-a55124a4c441\") " pod="openshift-multus/multus-additional-cni-plugins-rs4h5" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.799735 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/c2ef6994-166c-4195-af3f-a55124a4c441-cni-binary-copy\") pod \"multus-additional-cni-plugins-rs4h5\" (UID: \"c2ef6994-166c-4195-af3f-a55124a4c441\") " pod="openshift-multus/multus-additional-cni-plugins-rs4h5" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.799590 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/c2ef6994-166c-4195-af3f-a55124a4c441-os-release\") pod \"multus-additional-cni-plugins-rs4h5\" (UID: 
\"c2ef6994-166c-4195-af3f-a55124a4c441\") " pod="openshift-multus/multus-additional-cni-plugins-rs4h5" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.799787 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/c2ef6994-166c-4195-af3f-a55124a4c441-cnibin\") pod \"multus-additional-cni-plugins-rs4h5\" (UID: \"c2ef6994-166c-4195-af3f-a55124a4c441\") " pod="openshift-multus/multus-additional-cni-plugins-rs4h5" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.800010 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/c2ef6994-166c-4195-af3f-a55124a4c441-tuning-conf-dir\") pod \"multus-additional-cni-plugins-rs4h5\" (UID: \"c2ef6994-166c-4195-af3f-a55124a4c441\") " pod="openshift-multus/multus-additional-cni-plugins-rs4h5" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.800317 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/c2ef6994-166c-4195-af3f-a55124a4c441-cni-binary-copy\") pod \"multus-additional-cni-plugins-rs4h5\" (UID: \"c2ef6994-166c-4195-af3f-a55124a4c441\") " pod="openshift-multus/multus-additional-cni-plugins-rs4h5" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.800371 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/c2ef6994-166c-4195-af3f-a55124a4c441-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-rs4h5\" (UID: \"c2ef6994-166c-4195-af3f-a55124a4c441\") " pod="openshift-multus/multus-additional-cni-plugins-rs4h5" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.800435 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/c2ef6994-166c-4195-af3f-a55124a4c441-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-rs4h5\" (UID: \"c2ef6994-166c-4195-af3f-a55124a4c441\") " pod="openshift-multus/multus-additional-cni-plugins-rs4h5" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.809312 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.816713 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0"} Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.817501 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzg5n\" (UniqueName: \"kubernetes.io/projected/c2ef6994-166c-4195-af3f-a55124a4c441-kube-api-access-qzg5n\") pod \"multus-additional-cni-plugins-rs4h5\" (UID: \"c2ef6994-166c-4195-af3f-a55124a4c441\") " 
pod="openshift-multus/multus-additional-cni-plugins-rs4h5" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.817703 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-49pcd" event={"ID":"0c73ddd8-a673-4565-a30b-5b4d5b708edd","Type":"ContainerStarted","Data":"676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80"} Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.817737 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-49pcd" event={"ID":"0c73ddd8-a673-4565-a30b-5b4d5b708edd","Type":"ContainerStarted","Data":"58976470387b4facc10d428cca4adb2ea6fda702d074193d49b48fe7ff0aa144"} Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.818915 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerStarted","Data":"448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba"} Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.818948 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerStarted","Data":"1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9"} Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.818958 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerStarted","Data":"e38e07cbf8aea7899796200e999ae82ec907f10d6c3acbee9610bf7a952445a7"} Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.822189 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.830777 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.839346 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.849174 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.855058 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.859262 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: W1201 06:43:30.865421 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc2ef6994_166c_4195_af3f_a55124a4c441.slice/crio-b90ae4e3797c9816877fbde705291f08fee3da493739b02d63d950aa5c64a2c0 WatchSource:0}: Error finding container b90ae4e3797c9816877fbde705291f08fee3da493739b02d63d950aa5c64a2c0: Status 404 returned error can't find the container with id b90ae4e3797c9816877fbde705291f08fee3da493739b02d63d950aa5c64a2c0 Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.869192 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.881339 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.889184 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.904614 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.906343 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-zpkn8"] Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.906614 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-zpkn8" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.908015 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.908151 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.908168 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-gklnd"] Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.908874 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.910050 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.910055 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.911615 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.911740 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.911871 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.912002 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.918243 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.918464 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.951847 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:30 crc kubenswrapper[4632]: I1201 06:43:30.986918 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.005705 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ac685f74-ea0b-4a05-8018-a68fc1df20cc-ovnkube-script-lib\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.005750 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-multus-socket-dir-parent\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.005768 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zp9r6\" (UniqueName: \"kubernetes.io/projected/45a865b5-e289-4d8f-93d3-007d46f49be9-kube-api-access-zp9r6\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.005783 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-cni-bin\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.005799 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-host-run-k8s-cni-cncf-io\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.005813 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-host-var-lib-kubelet\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.005827 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: 
\"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-slash\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.005841 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-run-systemd\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.005856 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ac685f74-ea0b-4a05-8018-a68fc1df20cc-env-overrides\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.005870 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-system-cni-dir\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.005884 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-multus-conf-dir\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.005899 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/45a865b5-e289-4d8f-93d3-007d46f49be9-multus-daemon-config\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.005912 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-systemd-units\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.005932 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-run-openvswitch\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.005945 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-run-ovn-kubernetes\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.005962 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-cni-netd\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.005993 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-etc-openvswitch\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.006006 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-log-socket\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.006019 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-622pg\" (UniqueName: \"kubernetes.io/projected/ac685f74-ea0b-4a05-8018-a68fc1df20cc-kube-api-access-622pg\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.006033 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-run-netns\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.006050 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.006068 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ac685f74-ea0b-4a05-8018-a68fc1df20cc-ovn-node-metrics-cert\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.006087 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/45a865b5-e289-4d8f-93d3-007d46f49be9-cni-binary-copy\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.006101 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-host-var-lib-cni-multus\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: 
I1201 06:43:31.006113 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-var-lib-openvswitch\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.006127 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-run-ovn\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.006141 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-cnibin\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.006155 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-host-var-lib-cni-bin\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.006177 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-node-log\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.006193 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-multus-cni-dir\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.006206 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-os-release\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.006221 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-hostroot\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.006234 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-kubelet\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.006246 4632 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ac685f74-ea0b-4a05-8018-a68fc1df20cc-ovnkube-config\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.006259 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-etc-kubernetes\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.006275 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-host-run-netns\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.006288 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-host-run-multus-certs\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.028057 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.066612 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107039 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-run-openvswitch\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107071 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-run-ovn-kubernetes\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107089 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-cni-netd\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107110 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-etc-openvswitch\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107123 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-log-socket\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107137 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-622pg\" (UniqueName: \"kubernetes.io/projected/ac685f74-ea0b-4a05-8018-a68fc1df20cc-kube-api-access-622pg\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107157 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-run-openvswitch\") pod \"ovnkube-node-gklnd\" (UID: 
\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107164 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-run-netns\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107216 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-log-socket\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107223 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107241 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ac685f74-ea0b-4a05-8018-a68fc1df20cc-ovn-node-metrics-cert\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107237 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-etc-openvswitch\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107257 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/45a865b5-e289-4d8f-93d3-007d46f49be9-cni-binary-copy\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107298 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-run-netns\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107335 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-host-var-lib-cni-multus\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107244 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-run-ovn-kubernetes\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107366 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-var-lib-openvswitch\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107363 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107385 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-host-var-lib-cni-multus\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107411 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-run-ovn\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107312 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-cni-netd\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107423 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-var-lib-openvswitch\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107442 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-cnibin\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107448 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-run-ovn\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107475 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-host-var-lib-cni-bin\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107480 4632 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-cnibin\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107458 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-host-var-lib-cni-bin\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107514 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-node-log\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107529 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-multus-cni-dir\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107545 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-os-release\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107562 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-hostroot\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107577 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-kubelet\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107583 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-hostroot\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107592 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ac685f74-ea0b-4a05-8018-a68fc1df20cc-ovnkube-config\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107558 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-node-log\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 
crc kubenswrapper[4632]: I1201 06:43:31.107614 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-os-release\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107622 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-multus-cni-dir\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107615 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-kubelet\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107644 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-host-run-netns\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107666 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-host-run-multus-certs\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107683 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-etc-kubernetes\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107690 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-host-run-netns\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107703 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-multus-socket-dir-parent\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107719 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zp9r6\" (UniqueName: \"kubernetes.io/projected/45a865b5-e289-4d8f-93d3-007d46f49be9-kube-api-access-zp9r6\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107725 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-etc-kubernetes\") pod \"multus-zpkn8\" 
(UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107733 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ac685f74-ea0b-4a05-8018-a68fc1df20cc-ovnkube-script-lib\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107720 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-host-run-multus-certs\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107740 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-multus-socket-dir-parent\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107760 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-host-run-k8s-cni-cncf-io\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107776 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-host-var-lib-kubelet\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107789 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-slash\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107803 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-run-systemd\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107816 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-cni-bin\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107830 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-host-var-lib-kubelet\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107833 4632 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ac685f74-ea0b-4a05-8018-a68fc1df20cc-env-overrides\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107847 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-slash\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107859 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/45a865b5-e289-4d8f-93d3-007d46f49be9-multus-daemon-config\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107870 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-host-run-k8s-cni-cncf-io\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107882 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-run-systemd\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107887 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-systemd-units\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107874 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-systemd-units\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107905 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-cni-bin\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107915 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-system-cni-dir\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107930 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-multus-conf-dir\") 
pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.107969 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-multus-conf-dir\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.108171 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/45a865b5-e289-4d8f-93d3-007d46f49be9-system-cni-dir\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.108316 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ac685f74-ea0b-4a05-8018-a68fc1df20cc-ovnkube-config\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.108489 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ac685f74-ea0b-4a05-8018-a68fc1df20cc-env-overrides\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.108538 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ac685f74-ea0b-4a05-8018-a68fc1df20cc-ovnkube-script-lib\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.108727 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/45a865b5-e289-4d8f-93d3-007d46f49be9-cni-binary-copy\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.108729 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.108793 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/45a865b5-e289-4d8f-93d3-007d46f49be9-multus-daemon-config\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.109778 4632 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ac685f74-ea0b-4a05-8018-a68fc1df20cc-ovn-node-metrics-cert\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.131604 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-622pg\" (UniqueName: \"kubernetes.io/projected/ac685f74-ea0b-4a05-8018-a68fc1df20cc-kube-api-access-622pg\") pod \"ovnkube-node-gklnd\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.160388 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zp9r6\" (UniqueName: \"kubernetes.io/projected/45a865b5-e289-4d8f-93d3-007d46f49be9-kube-api-access-zp9r6\") pod \"multus-zpkn8\" (UID: \"45a865b5-e289-4d8f-93d3-007d46f49be9\") " pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.185026 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.219225 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-zpkn8" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.225687 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.227169 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"
,\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-12-01T06:43:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:31 crc kubenswrapper[4632]: W1201 06:43:31.227984 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod45a865b5_e289_4d8f_93d3_007d46f49be9.slice/crio-6b4413f7930224740039c860f451d3bab7397cc03d66c7fa2831782a7b1bc2eb WatchSource:0}: Error finding container 6b4413f7930224740039c860f451d3bab7397cc03d66c7fa2831782a7b1bc2eb: Status 404 returned error can't find the container with id 6b4413f7930224740039c860f451d3bab7397cc03d66c7fa2831782a7b1bc2eb Dec 01 06:43:31 crc kubenswrapper[4632]: W1201 06:43:31.234486 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podac685f74_ea0b_4a05_8018_a68fc1df20cc.slice/crio-825340cbe67a037e0f6a07f5b18f091693654210e4eb3c5b64afdc6f1103530f WatchSource:0}: Error finding container 825340cbe67a037e0f6a07f5b18f091693654210e4eb3c5b64afdc6f1103530f: Status 404 returned error can't find the container with id 825340cbe67a037e0f6a07f5b18f091693654210e4eb3c5b64afdc6f1103530f Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.266607 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\
":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.316669 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook
\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.344996 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-12-01T06:43:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.387803 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.425731 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.466074 4632 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.507997 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.546881 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.592514 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.627564 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.667987 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.708625 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.746382 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.787327 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.822748 4632 generic.go:334] "Generic (PLEG): container finished" podID="c2ef6994-166c-4195-af3f-a55124a4c441" containerID="97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238" exitCode=0 Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.822806 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" event={"ID":"c2ef6994-166c-4195-af3f-a55124a4c441","Type":"ContainerDied","Data":"97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238"} Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.822829 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" event={"ID":"c2ef6994-166c-4195-af3f-a55124a4c441","Type":"ContainerStarted","Data":"b90ae4e3797c9816877fbde705291f08fee3da493739b02d63d950aa5c64a2c0"} Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.824190 4632 generic.go:334] "Generic (PLEG): container finished" podID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerID="0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209" exitCode=0 Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.824250 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" event={"ID":"ac685f74-ea0b-4a05-8018-a68fc1df20cc","Type":"ContainerDied","Data":"0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209"} Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.824297 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" event={"ID":"ac685f74-ea0b-4a05-8018-a68fc1df20cc","Type":"ContainerStarted","Data":"825340cbe67a037e0f6a07f5b18f091693654210e4eb3c5b64afdc6f1103530f"} Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.826732 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zpkn8" event={"ID":"45a865b5-e289-4d8f-93d3-007d46f49be9","Type":"ContainerStarted","Data":"40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8"} Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.826768 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zpkn8" event={"ID":"45a865b5-e289-4d8f-93d3-007d46f49be9","Type":"ContainerStarted","Data":"6b4413f7930224740039c860f451d3bab7397cc03d66c7fa2831782a7b1bc2eb"} Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.831757 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.868345 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.907391 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.946111 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.974948 4632 kubelet_node_status.go:401] "Setting 
node annotation to enable volume controller attach/detach" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.976011 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.976094 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.976161 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.976331 4632 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 01 06:43:31 crc kubenswrapper[4632]: I1201 06:43:31.985919 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:31Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.041305 4632 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.041482 4632 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.042196 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.042229 4632 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.042238 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.042251 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.042259 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:32Z","lastTransitionTime":"2025-12-01T06:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:32 crc kubenswrapper[4632]: E1201 06:43:32.055028 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.057645 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.057677 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.057685 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.057697 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.057706 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:32Z","lastTransitionTime":"2025-12-01T06:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:32 crc kubenswrapper[4632]: E1201 06:43:32.066195 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.068573 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c85
7df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.070538 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.070574 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.070584 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.070597 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.070605 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:32Z","lastTransitionTime":"2025-12-01T06:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:32 crc kubenswrapper[4632]: E1201 06:43:32.079022 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.081235 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.081274 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.081283 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.081297 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.081305 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:32Z","lastTransitionTime":"2025-12-01T06:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:32 crc kubenswrapper[4632]: E1201 06:43:32.090739 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.094345 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.094397 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.094406 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.094418 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.094428 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:32Z","lastTransitionTime":"2025-12-01T06:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:32 crc kubenswrapper[4632]: E1201 06:43:32.104790 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: E1201 06:43:32.104912 4632 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.106065 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.106092 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.106100 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.106114 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.106122 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:32Z","lastTransitionTime":"2025-12-01T06:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.108185 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.146863 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.186561 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.207786 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.207819 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.207828 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.207845 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.207853 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:32Z","lastTransitionTime":"2025-12-01T06:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.227365 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\
\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.269857 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.306554 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.310049 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.310075 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.310084 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.310096 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.310105 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:32Z","lastTransitionTime":"2025-12-01T06:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.346664 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.384904 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.411728 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.411759 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.411768 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.411780 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.411789 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:32Z","lastTransitionTime":"2025-12-01T06:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.420482 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:43:32 crc kubenswrapper[4632]: E1201 06:43:32.420618 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:43:36.4206043 +0000 UTC m=+25.985617273 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.427073 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b1
54edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.466925 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":
true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"
name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.505654 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.513928 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.513966 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.513988 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.514001 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.514009 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:32Z","lastTransitionTime":"2025-12-01T06:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.521768 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.521810 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.521835 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.521870 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:43:32 crc kubenswrapper[4632]: E1201 06:43:32.521937 4632 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 06:43:32 crc kubenswrapper[4632]: E1201 06:43:32.521970 4632 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:43:32 crc kubenswrapper[4632]: E1201 06:43:32.522003 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:43:36.521990861 +0000 UTC m=+26.087003834 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 06:43:32 crc kubenswrapper[4632]: E1201 06:43:32.522020 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 06:43:32 crc kubenswrapper[4632]: E1201 06:43:32.522025 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-12-01 06:43:36.522014045 +0000 UTC m=+26.087027017 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:43:32 crc kubenswrapper[4632]: E1201 06:43:32.522028 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 06:43:32 crc kubenswrapper[4632]: E1201 06:43:32.522062 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 06:43:32 crc kubenswrapper[4632]: E1201 06:43:32.522035 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 06:43:32 crc kubenswrapper[4632]: E1201 06:43:32.522121 4632 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:43:32 crc kubenswrapper[4632]: E1201 06:43:32.522075 4632 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:43:32 crc kubenswrapper[4632]: E1201 06:43:32.522202 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 06:43:36.522174047 +0000 UTC m=+26.087187020 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:43:32 crc kubenswrapper[4632]: E1201 06:43:32.522240 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 06:43:36.522224371 +0000 UTC m=+26.087237354 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.546324 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.587079 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with incomplete 
status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\
\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.616153 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.616186 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.616194 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.616207 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.616217 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:32Z","lastTransitionTime":"2025-12-01T06:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.626467 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.666737 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.705923 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.718201 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.718228 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.718236 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.718249 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.718258 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:32Z","lastTransitionTime":"2025-12-01T06:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.747262 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.749481 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.749512 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.749510 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:43:32 crc kubenswrapper[4632]: E1201 06:43:32.749576 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:43:32 crc kubenswrapper[4632]: E1201 06:43:32.749648 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:43:32 crc kubenswrapper[4632]: E1201 06:43:32.749765 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.790082 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuberne
tes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[
{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.820145 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.820176 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.820187 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.820200 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.820207 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:32Z","lastTransitionTime":"2025-12-01T06:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.826334 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.831720 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" event={"ID":"ac685f74-ea0b-4a05-8018-a68fc1df20cc","Type":"ContainerStarted","Data":"00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9"} Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.831754 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" event={"ID":"ac685f74-ea0b-4a05-8018-a68fc1df20cc","Type":"ContainerStarted","Data":"fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe"} Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.831765 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" event={"ID":"ac685f74-ea0b-4a05-8018-a68fc1df20cc","Type":"ContainerStarted","Data":"0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76"} Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.831774 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" event={"ID":"ac685f74-ea0b-4a05-8018-a68fc1df20cc","Type":"ContainerStarted","Data":"f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6"} Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.831781 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" event={"ID":"ac685f74-ea0b-4a05-8018-a68fc1df20cc","Type":"ContainerStarted","Data":"d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820"} Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.831788 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" event={"ID":"ac685f74-ea0b-4a05-8018-a68fc1df20cc","Type":"ContainerStarted","Data":"9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288"} Dec 01 06:43:32 crc 
kubenswrapper[4632]: I1201 06:43:32.833054 4632 generic.go:334] "Generic (PLEG): container finished" podID="c2ef6994-166c-4195-af3f-a55124a4c441" containerID="61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3" exitCode=0 Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.833077 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" event={"ID":"c2ef6994-166c-4195-af3f-a55124a4c441","Type":"ContainerDied","Data":"61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3"} Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.866994 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.907330 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.921888 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.921919 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.921930 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.921942 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.921952 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:32Z","lastTransitionTime":"2025-12-01T06:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.946962 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:32 crc kubenswrapper[4632]: I1201 06:43:32.987368 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:32Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.023818 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.023843 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.023852 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.023864 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.023872 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:33Z","lastTransitionTime":"2025-12-01T06:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.026760 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.036146 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-hq8m4"] Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.036428 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-hq8m4" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.060632 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.080564 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.100226 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.121125 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.126257 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.126290 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.126311 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.126325 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.126334 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:33Z","lastTransitionTime":"2025-12-01T06:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.126637 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ede6d987-8ead-4c6e-8492-655f67bb0476-host\") pod \"node-ca-hq8m4\" (UID: \"ede6d987-8ead-4c6e-8492-655f67bb0476\") " pod="openshift-image-registry/node-ca-hq8m4" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.126675 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ds6p\" (UniqueName: \"kubernetes.io/projected/ede6d987-8ead-4c6e-8492-655f67bb0476-kube-api-access-5ds6p\") pod \"node-ca-hq8m4\" (UID: \"ede6d987-8ead-4c6e-8492-655f67bb0476\") " pod="openshift-image-registry/node-ca-hq8m4" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.126711 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/ede6d987-8ead-4c6e-8492-655f67bb0476-serviceca\") pod \"node-ca-hq8m4\" (UID: \"ede6d987-8ead-4c6e-8492-655f67bb0476\") " pod="openshift-image-registry/node-ca-hq8m4" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.145783 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.186710 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.227227 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ede6d987-8ead-4c6e-8492-655f67bb0476-host\") pod \"node-ca-hq8m4\" (UID: \"ede6d987-8ead-4c6e-8492-655f67bb0476\") " pod="openshift-image-registry/node-ca-hq8m4" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.227264 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ds6p\" (UniqueName: \"kubernetes.io/projected/ede6d987-8ead-4c6e-8492-655f67bb0476-kube-api-access-5ds6p\") pod \"node-ca-hq8m4\" (UID: \"ede6d987-8ead-4c6e-8492-655f67bb0476\") " pod="openshift-image-registry/node-ca-hq8m4" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.227298 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/ede6d987-8ead-4c6e-8492-655f67bb0476-serviceca\") pod \"node-ca-hq8m4\" (UID: \"ede6d987-8ead-4c6e-8492-655f67bb0476\") " pod="openshift-image-registry/node-ca-hq8m4" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.227366 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ede6d987-8ead-4c6e-8492-655f67bb0476-host\") pod \"node-ca-hq8m4\" (UID: \"ede6d987-8ead-4c6e-8492-655f67bb0476\") " pod="openshift-image-registry/node-ca-hq8m4" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.227970 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.228015 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.228024 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.228036 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:33 crc kubenswrapper[4632]: 
I1201 06:43:33.228044 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:33Z","lastTransitionTime":"2025-12-01T06:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.228052 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/ede6d987-8ead-4c6e-8492-655f67bb0476-serviceca\") pod \"node-ca-hq8m4\" (UID: \"ede6d987-8ead-4c6e-8492-655f67bb0476\") " pod="openshift-image-registry/node-ca-hq8m4" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.239019 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:33Z 
is after 2025-08-24T17:21:41Z" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.260852 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ds6p\" (UniqueName: \"kubernetes.io/projected/ede6d987-8ead-4c6e-8492-655f67bb0476-kube-api-access-5ds6p\") pod \"node-ca-hq8m4\" (UID: \"ede6d987-8ead-4c6e-8492-655f67bb0476\") " pod="openshift-image-registry/node-ca-hq8m4" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.287166 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.326114 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.329665 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.329692 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.329700 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.329712 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.329721 4632 
setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:33Z","lastTransitionTime":"2025-12-01T06:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.345555 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-hq8m4" Dec 01 06:43:33 crc kubenswrapper[4632]: W1201 06:43:33.354130 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podede6d987_8ead_4c6e_8492_655f67bb0476.slice/crio-37ecedea4fee539f7fdd0cef47286fcfe78984383045c58c602c8a03bb90f494 WatchSource:0}: Error finding container 37ecedea4fee539f7fdd0cef47286fcfe78984383045c58c602c8a03bb90f494: Status 404 returned error can't find the container with id 37ecedea4fee539f7fdd0cef47286fcfe78984383045c58c602c8a03bb90f494 Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.368396 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06
:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.407157 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/ser
viceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.431475 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.431508 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.431517 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.431530 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.431539 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:33Z","lastTransitionTime":"2025-12-01T06:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.445971 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.488008 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/r
un/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.527290 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.533545 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.533575 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.533584 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 
06:43:33.533598 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.533613 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:33Z","lastTransitionTime":"2025-12-01T06:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.568388 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.606572 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf
5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.638212 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.638242 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.638251 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.638263 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.638274 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:33Z","lastTransitionTime":"2025-12-01T06:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.647876 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.686489 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.726280 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.739993 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.740016 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.740024 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.740038 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.740047 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:33Z","lastTransitionTime":"2025-12-01T06:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.766886 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.808519 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.836726 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-hq8m4" event={"ID":"ede6d987-8ead-4c6e-8492-655f67bb0476","Type":"ContainerStarted","Data":"7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8"} Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.836765 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-hq8m4" event={"ID":"ede6d987-8ead-4c6e-8492-655f67bb0476","Type":"ContainerStarted","Data":"37ecedea4fee539f7fdd0cef47286fcfe78984383045c58c602c8a03bb90f494"} Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.838805 4632 generic.go:334] "Generic (PLEG): container finished" podID="c2ef6994-166c-4195-af3f-a55124a4c441" containerID="851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46" exitCode=0 Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.838829 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" event={"ID":"c2ef6994-166c-4195-af3f-a55124a4c441","Type":"ContainerDied","Data":"851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46"} Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.841483 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.841515 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.841523 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.841534 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.841542 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:33Z","lastTransitionTime":"2025-12-01T06:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.853868 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094
296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.886925 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.927503 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc
32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.943672 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.943701 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.943710 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.943723 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.943731 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:33Z","lastTransitionTime":"2025-12-01T06:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:33 crc kubenswrapper[4632]: I1201 06:43:33.968153 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.007424 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:34Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.045744 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.045770 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.045780 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.045794 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 
06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.045806 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:34Z","lastTransitionTime":"2025-12-01T06:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.049368 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hos
tIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:34Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.086311 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:34Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.127646 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:34Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.148290 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.148322 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.148332 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.148345 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.148367 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:34Z","lastTransitionTime":"2025-12-01T06:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.165641 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:34Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.207164 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:34Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.247096 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:34Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.250814 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.250857 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.250869 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.250886 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.250897 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:34Z","lastTransitionTime":"2025-12-01T06:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.291572 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094
296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:34Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.326152 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:34Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.353523 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.353563 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.353574 4632 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.353589 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.353598 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:34Z","lastTransitionTime":"2025-12-01T06:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.370992 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:34Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.408387 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:34Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.446704 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:34Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.455944 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.456005 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.456015 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.456032 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.456041 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:34Z","lastTransitionTime":"2025-12-01T06:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.486222 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:34Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.525021 4632 
status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:34Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.558661 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.558698 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.558707 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.558724 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.558733 4632 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:34Z","lastTransitionTime":"2025-12-01T06:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.567332 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\
":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:34Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.606689 4632 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:34Z is after 
2025-08-24T17:21:41Z"
Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.660337 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.660493 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.660568 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.660629 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.660688 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:34Z","lastTransitionTime":"2025-12-01T06:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.749343 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.749396 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:43:34 crc kubenswrapper[4632]: E1201 06:43:34.749467 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.749511 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:43:34 crc kubenswrapper[4632]: E1201 06:43:34.749624 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 06:43:34 crc kubenswrapper[4632]: E1201 06:43:34.749681 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.762181 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.762280 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.762429 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.762497 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.762551 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:34Z","lastTransitionTime":"2025-12-01T06:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.844274 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" event={"ID":"ac685f74-ea0b-4a05-8018-a68fc1df20cc","Type":"ContainerStarted","Data":"96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a"}
Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.846045 4632 generic.go:334] "Generic (PLEG): container finished" podID="c2ef6994-166c-4195-af3f-a55124a4c441" containerID="4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c" exitCode=0
Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.846082 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" event={"ID":"c2ef6994-166c-4195-af3f-a55124a4c441","Type":"ContainerDied","Data":"4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c"}
Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.856499 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:34Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.864321 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.864375 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.864386 4632 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID"
Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.864407 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.864417 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:34Z","lastTransitionTime":"2025-12-01T06:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.865574 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:34Z is after 2025-08-24T17:21:41Z"
Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.874839 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-ap
iserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:34Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.882946 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:34Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.891526 4632 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:34Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.901255 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-releas
e\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:34Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.909848 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mount
Path\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:34Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.930675 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:34Z 
is after 2025-08-24T17:21:41Z" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.965846 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.965883 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.965892 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.965905 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.965914 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:34Z","lastTransitionTime":"2025-12-01T06:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:34 crc kubenswrapper[4632]: I1201 06:43:34.967408 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\
\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:34Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.007972 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.047492 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.068225 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.068276 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.068290 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.068302 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.068310 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:35Z","lastTransitionTime":"2025-12-01T06:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.086544 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.133107 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.167832 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.170045 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.170081 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.170100 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.170112 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.170120 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:35Z","lastTransitionTime":"2025-12-01T06:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.272209 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.272250 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.272258 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.272273 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.272283 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:35Z","lastTransitionTime":"2025-12-01T06:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.374013 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.374047 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.374056 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.374067 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.374076 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:35Z","lastTransitionTime":"2025-12-01T06:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.475814 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.475847 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.475856 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.475867 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.475876 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:35Z","lastTransitionTime":"2025-12-01T06:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.577883 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.578075 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.578085 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.578097 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.578105 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:35Z","lastTransitionTime":"2025-12-01T06:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.680020 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.680054 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.680063 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.680076 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.680084 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:35Z","lastTransitionTime":"2025-12-01T06:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.781516 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.781542 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.781550 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.781560 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.781567 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:35Z","lastTransitionTime":"2025-12-01T06:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.851722 4632 generic.go:334] "Generic (PLEG): container finished" podID="c2ef6994-166c-4195-af3f-a55124a4c441" containerID="9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e" exitCode=0 Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.851792 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" event={"ID":"c2ef6994-166c-4195-af3f-a55124a4c441","Type":"ContainerDied","Data":"9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e"} Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.862086 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.870169 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.882840 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.882866 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.882875 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.882888 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.882896 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:35Z","lastTransitionTime":"2025-12-01T06:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.893812 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\
"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.911283 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.921877 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.934403 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.946851 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.956992 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"las
tState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.967749 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.976521 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.984280 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.984301 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.984311 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.984322 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.984331 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:35Z","lastTransitionTime":"2025-12-01T06:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.984340 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:35 crc kubenswrapper[4632]: I1201 06:43:35.993086 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:35Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.002263 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:36Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.009371 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:36Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.085897 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.085931 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.085939 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.085951 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.085960 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:36Z","lastTransitionTime":"2025-12-01T06:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.187869 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.187902 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.187911 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.187922 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.187930 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:36Z","lastTransitionTime":"2025-12-01T06:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.289823 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.289852 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.289860 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.289872 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.289879 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:36Z","lastTransitionTime":"2025-12-01T06:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.391583 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.391612 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.391621 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.391633 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.391641 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:36Z","lastTransitionTime":"2025-12-01T06:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.459704 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:43:36 crc kubenswrapper[4632]: E1201 06:43:36.459842 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:43:44.459826601 +0000 UTC m=+34.024839574 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.493268 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.493335 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.493373 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.493400 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.493439 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:36Z","lastTransitionTime":"2025-12-01T06:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.560517 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.560552 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.560571 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.560590 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:43:36 crc kubenswrapper[4632]: E1201 06:43:36.560655 4632 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Dec 01 06:43:36 crc kubenswrapper[4632]: E1201 06:43:36.560688 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:43:44.560677753 +0000 UTC m=+34.125690726 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 06:43:36 crc kubenswrapper[4632]: E1201 06:43:36.560730 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 06:43:36 crc kubenswrapper[4632]: E1201 06:43:36.560754 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 06:43:36 crc kubenswrapper[4632]: E1201 06:43:36.560765 4632 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:43:36 crc kubenswrapper[4632]: E1201 06:43:36.560770 4632 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:43:36 crc kubenswrapper[4632]: E1201 06:43:36.560799 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 06:43:44.560790296 +0000 UTC m=+34.125803268 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:43:36 crc kubenswrapper[4632]: E1201 06:43:36.560843 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 06:43:36 crc kubenswrapper[4632]: E1201 06:43:36.560868 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:43:44.560853674 +0000 UTC m=+34.125866647 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:43:36 crc kubenswrapper[4632]: E1201 06:43:36.560880 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 06:43:36 crc kubenswrapper[4632]: E1201 06:43:36.560896 4632 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:43:36 crc kubenswrapper[4632]: E1201 06:43:36.560961 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 06:43:44.560940879 +0000 UTC m=+34.125953852 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.598376 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.598803 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.598820 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.598835 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.598853 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:36Z","lastTransitionTime":"2025-12-01T06:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.700904 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.700939 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.700948 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.700961 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.700970 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:36Z","lastTransitionTime":"2025-12-01T06:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.749587 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.749597 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.749605 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:43:36 crc kubenswrapper[4632]: E1201 06:43:36.749864 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:43:36 crc kubenswrapper[4632]: E1201 06:43:36.749705 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:43:36 crc kubenswrapper[4632]: E1201 06:43:36.749786 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.802477 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.802508 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.802518 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.802532 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.802542 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:36Z","lastTransitionTime":"2025-12-01T06:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.857577 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" event={"ID":"ac685f74-ea0b-4a05-8018-a68fc1df20cc","Type":"ContainerStarted","Data":"85539bcbe7fc3c32e249e22f9e0124c1427e6c5c8b4430e7411c4153d2531efd"} Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.857882 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.859895 4632 generic.go:334] "Generic (PLEG): container finished" podID="c2ef6994-166c-4195-af3f-a55124a4c441" containerID="621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647" exitCode=0 Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.859915 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" event={"ID":"c2ef6994-166c-4195-af3f-a55124a4c441","Type":"ContainerDied","Data":"621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647"} Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.866153 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:36Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.876370 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:36Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.877574 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.890072 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85539bcbe7fc3c32e249e22f9e0124c1427e6c5c
8b4430e7411c4153d2531efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccou
nt\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:36Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.898554 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:36Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.904730 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.904751 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.904760 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.904772 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.904781 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:36Z","lastTransitionTime":"2025-12-01T06:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.907597 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:36Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.916516 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:36Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.924820 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:36Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.931558 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:36Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.939447 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:36Z is after 
2025-08-24T17:21:41Z" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.946423 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:36Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.954702 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:36Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.962078 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:36Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.969186 4632 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:36Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.978246 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d8
9cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:36Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.987097 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:36Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:36 crc kubenswrapper[4632]: I1201 06:43:36.996141 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:36Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.004850 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.006310 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.006349 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.006379 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.006396 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.006406 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:37Z","lastTransitionTime":"2025-12-01T06:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.012761 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.020087 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.028569 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.040533 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"re
cursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cr
i-o://9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85539bcbe7fc3c32e249e22f9e0124c1427e6c5c8b4430e7411c4153d2531efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"
mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.047185 4632 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.055403 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.062007 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.069203 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.079110 4632 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0
a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.088319 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.096714 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.108660 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.108796 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.108910 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.108998 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.109098 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:37Z","lastTransitionTime":"2025-12-01T06:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.212001 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.212048 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.212058 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.212073 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.212083 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:37Z","lastTransitionTime":"2025-12-01T06:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.313692 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.313730 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.313739 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.313756 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.313765 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:37Z","lastTransitionTime":"2025-12-01T06:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.415625 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.415652 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.415663 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.415677 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.415688 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:37Z","lastTransitionTime":"2025-12-01T06:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.517472 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.517508 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.517517 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.517531 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.517542 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:37Z","lastTransitionTime":"2025-12-01T06:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.619572 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.619815 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.619877 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.619943 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.620009 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:37Z","lastTransitionTime":"2025-12-01T06:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.721819 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.722094 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.722178 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.722251 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.722311 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:37Z","lastTransitionTime":"2025-12-01T06:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.824327 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.824375 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.824385 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.824399 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.824411 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:37Z","lastTransitionTime":"2025-12-01T06:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.866746 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" event={"ID":"c2ef6994-166c-4195-af3f-a55124a4c441","Type":"ContainerStarted","Data":"3608e4738e1a29dbcc71944454085dcb6d9db81026a72449941372b7d11c912f"} Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.866795 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.866761 4632 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.883451 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.885330 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.892057 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.899719 4632 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.909385 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3608e4738e1a29dbcc71944454085dcb6d9db81026a72449941372b7d11c912f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.918393 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.926647 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.926677 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.926687 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.926712 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.926721 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:37Z","lastTransitionTime":"2025-12-01T06:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.931603 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85539bcbe7fc3c32e249e22f9e0124c1427e6c5c
8b4430e7411c4153d2531efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.940314 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.950237 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.958612 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.966263 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.976269 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.983014 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.991786 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:37 crc kubenswrapper[4632]: I1201 06:43:37.999203 4632 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-49pcd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:37Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.008199 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:38Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.017622 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:38Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.025308 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:38Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.028314 4632 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.028345 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.028372 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.028387 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.028397 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:38Z","lastTransitionTime":"2025-12-01T06:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.034738 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controlle
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kub
e-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:38Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.044247 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3608e4738e1a29dbcc71944454085dcb6d9db81026a72449941372b7d11c912f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed8145
1ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTim
e\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:38Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.052764 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:38Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.062334 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:38Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.070995 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:38Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.080498 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:38Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.088191 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:38Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.096944 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:38Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.104658 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:38Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.116725 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85539bcbe7fc3c32e249e22f9e0124c1427e6c5c
8b4430e7411c4153d2531efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:38Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.124069 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:38Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.130200 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.130232 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.130242 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.130254 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.130263 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:38Z","lastTransitionTime":"2025-12-01T06:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.236148 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.236208 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.236220 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.236238 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.236248 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:38Z","lastTransitionTime":"2025-12-01T06:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.338562 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.338596 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.338607 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.338621 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.338630 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:38Z","lastTransitionTime":"2025-12-01T06:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.440467 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.440500 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.440510 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.440523 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.440533 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:38Z","lastTransitionTime":"2025-12-01T06:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.543264 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.543298 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.543314 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.543331 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.543339 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:38Z","lastTransitionTime":"2025-12-01T06:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.645309 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.645341 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.645362 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.645376 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.645386 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:38Z","lastTransitionTime":"2025-12-01T06:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.748795 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.748859 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.748870 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.748887 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.748899 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:38Z","lastTransitionTime":"2025-12-01T06:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.749219 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.749241 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:43:38 crc kubenswrapper[4632]: E1201 06:43:38.749334 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.749397 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:43:38 crc kubenswrapper[4632]: E1201 06:43:38.749437 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:43:38 crc kubenswrapper[4632]: E1201 06:43:38.749498 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.851187 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.851226 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.851235 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.851251 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.851260 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:38Z","lastTransitionTime":"2025-12-01T06:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.870399 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gklnd_ac685f74-ea0b-4a05-8018-a68fc1df20cc/ovnkube-controller/0.log" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.872835 4632 generic.go:334] "Generic (PLEG): container finished" podID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerID="85539bcbe7fc3c32e249e22f9e0124c1427e6c5c8b4430e7411c4153d2531efd" exitCode=1 Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.872882 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" event={"ID":"ac685f74-ea0b-4a05-8018-a68fc1df20cc","Type":"ContainerDied","Data":"85539bcbe7fc3c32e249e22f9e0124c1427e6c5c8b4430e7411c4153d2531efd"} Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.873451 4632 scope.go:117] "RemoveContainer" containerID="85539bcbe7fc3c32e249e22f9e0124c1427e6c5c8b4430e7411c4153d2531efd" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.883073 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:38Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.890913 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:38Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.898958 4632 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:38Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.907588 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3608e4738e1a29dbcc71944454085dcb6d9db81026a72449941372b7d11c912f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:38Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.915408 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:38Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.927141 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9592c
086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://85539bcbe7fc3c32e249e22f9e0124c1427e6c5c8b4430e7411c4153d2531efd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85539bcbe7fc3c32e249e22f9e0124c1427e6c5c8b4430e7411c4153d2531efd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:43:38Z\\\",\\\"message\\\":\\\" handler 1 for removal\\\\nI1201 06:43:38.305562 5922 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 06:43:38.305568 5922 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 06:43:38.305586 5922 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 06:43:38.305617 5922 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 06:43:38.305621 5922 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 06:43:38.305637 5922 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 06:43:38.305653 5922 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 06:43:38.305663 5922 factory.go:656] Stopping watch factory\\\\nI1201 06:43:38.305673 5922 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 06:43:38.305672 5922 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 06:43:38.305678 5922 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 06:43:38.305683 5922 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 06:43:38.305688 5922 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 06:43:38.305689 5922 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 06:43:38.305695 5922 handler.go:208] Removed *v1.NetworkPolicy 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:38Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.935140 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4b
a8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:38Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.944517 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:38Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.952964 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.953000 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.953010 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.953025 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.953035 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:38Z","lastTransitionTime":"2025-12-01T06:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.955176 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:38Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.964649 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:38Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.974045 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:38Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.987214 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:38Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:38 crc kubenswrapper[4632]: I1201 06:43:38.997096 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:38Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.005375 4632 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-49pcd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:39Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.055205 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.055238 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.055247 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.055261 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.055270 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:39Z","lastTransitionTime":"2025-12-01T06:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.157727 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.157768 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.157777 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.157793 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.157802 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:39Z","lastTransitionTime":"2025-12-01T06:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.259933 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.259973 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.259993 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.260007 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.260016 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:39Z","lastTransitionTime":"2025-12-01T06:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.361762 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.361814 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.361824 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.361843 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.361855 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:39Z","lastTransitionTime":"2025-12-01T06:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.463743 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.463788 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.463799 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.463815 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.463825 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:39Z","lastTransitionTime":"2025-12-01T06:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.566185 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.566216 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.566225 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.566238 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.566247 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:39Z","lastTransitionTime":"2025-12-01T06:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.668276 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.668319 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.668330 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.668345 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.668371 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:39Z","lastTransitionTime":"2025-12-01T06:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.770927 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.770965 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.770973 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.770996 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.771005 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:39Z","lastTransitionTime":"2025-12-01T06:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.872423 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.872470 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.872482 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.872499 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.872508 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:39Z","lastTransitionTime":"2025-12-01T06:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.876117 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gklnd_ac685f74-ea0b-4a05-8018-a68fc1df20cc/ovnkube-controller/1.log"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.876499 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gklnd_ac685f74-ea0b-4a05-8018-a68fc1df20cc/ovnkube-controller/0.log"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.878209 4632 generic.go:334] "Generic (PLEG): container finished" podID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerID="7e387aea0c4554e79d35c792619e3f2ec595730ca940ba84c1897bad221b960c" exitCode=1
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.878239 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" event={"ID":"ac685f74-ea0b-4a05-8018-a68fc1df20cc","Type":"ContainerDied","Data":"7e387aea0c4554e79d35c792619e3f2ec595730ca940ba84c1897bad221b960c"}
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.878279 4632 scope.go:117] "RemoveContainer" containerID="85539bcbe7fc3c32e249e22f9e0124c1427e6c5c8b4430e7411c4153d2531efd"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.878750 4632 scope.go:117] "RemoveContainer" containerID="7e387aea0c4554e79d35c792619e3f2ec595730ca940ba84c1897bad221b960c"
Dec 01 06:43:39 crc kubenswrapper[4632]: E1201 06:43:39.878917 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-gklnd_openshift-ovn-kubernetes(ac685f74-ea0b-4a05-8018-a68fc1df20cc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.887270 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:39Z is after 2025-08-24T17:21:41Z"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.896313 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:39Z is after 2025-08-24T17:21:41Z"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.905399 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:39Z is after 2025-08-24T17:21:41Z"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.914089 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:39Z is after 2025-08-24T17:21:41Z"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.922087 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:39Z is after 2025-08-24T17:21:41Z"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.929884 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:39Z is after 2025-08-24T17:21:41Z"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.941835 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e387aea0c4554e79d35c792619e3f2ec595730ca940ba84c1897bad221b960c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85539bcbe7fc3c32e249e22f9e0124c1427e6c5c8b4430e7411c4153d2531efd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:43:38Z\\\",\\\"message\\\":\\\" handler 1 for removal\\\\nI1201 06:43:38.305562 5922 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 06:43:38.305568 5922 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 06:43:38.305586 5922 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 06:43:38.305617 5922 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 06:43:38.305621 5922 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 06:43:38.305637 5922 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 06:43:38.305653 5922 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 06:43:38.305663 5922 factory.go:656] Stopping watch factory\\\\nI1201 06:43:38.305673 5922 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 06:43:38.305672 5922 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 06:43:38.305678 5922 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 06:43:38.305683 5922 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 06:43:38.305688 5922 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 06:43:38.305689 5922 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 06:43:38.305695 5922 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e387aea0c4554e79d35c792619e3f2ec595730ca940ba84c1897bad221b960c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:43:39Z\\\",\\\"message\\\":\\\"Service_default/kubernetes_TCP_node_switch_crc\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"default/kubernetes\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.1\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{services.Addr{IP:\\\\\\\"192.168.126.11\\\\\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string{\\\\\\\"crc\\\\\\\"}, Routers:[]string(nil), Groups:[]string(nil)}}\\\\nF1201 06:43:39.481179 6053 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:39Z is after 2025-08-24T17:21:41Z"
Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.949039 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:39Z is after 2025-08-24T17:21:41Z"
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:39Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.957076 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:39Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.963612 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:39Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.972108 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:39Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.974214 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.974243 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.974254 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.974266 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 
06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.974273 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:39Z","lastTransitionTime":"2025-12-01T06:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.979925 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hos
tIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:39Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.987178 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:39Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:39 crc kubenswrapper[4632]: I1201 06:43:39.997905 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3608e4738e1a29dbcc71944454085dcb6d9db81026a72449941372b7d11c912f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:39Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.076761 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.076801 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:40 crc 
kubenswrapper[4632]: I1201 06:43:40.076810 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.076825 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.076835 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:40Z","lastTransitionTime":"2025-12-01T06:43:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.178443 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.178480 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.178489 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.178502 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.178512 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:40Z","lastTransitionTime":"2025-12-01T06:43:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.280393 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.280431 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.280440 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.280456 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.280464 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:40Z","lastTransitionTime":"2025-12-01T06:43:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.382555 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.382591 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.382600 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.382612 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.382641 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:40Z","lastTransitionTime":"2025-12-01T06:43:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.484510 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.484548 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.484557 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.484570 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.484579 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:40Z","lastTransitionTime":"2025-12-01T06:43:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.586846 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.586888 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.586898 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.586911 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.586919 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:40Z","lastTransitionTime":"2025-12-01T06:43:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.689040 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.689065 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.689074 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.689085 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.689094 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:40Z","lastTransitionTime":"2025-12-01T06:43:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.750101 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.750172 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:43:40 crc kubenswrapper[4632]: E1201 06:43:40.750207 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.750235 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:43:40 crc kubenswrapper[4632]: E1201 06:43:40.750282 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:43:40 crc kubenswrapper[4632]: E1201 06:43:40.750317 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.763596 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\
":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209948
2919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e387aea0c4554e79d35c792619e3f2ec595730ca940ba84c1897bad221b960c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://85539bcbe7fc3c32e249e22f9e0124c1427e6c5c8b4430e7411c4153d2531efd\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:43:38Z\\\",\\\"message\\\":\\\" handler 1 for removal\\\\nI1201 06:43:38.305562 5922 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1201 06:43:38.305568 5922 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1201 06:43:38.305586 5922 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1201 06:43:38.305617 5922 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1201 06:43:38.305621 5922 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1201 06:43:38.305637 5922 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1201 06:43:38.305653 5922 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1201 06:43:38.305663 5922 factory.go:656] Stopping watch factory\\\\nI1201 06:43:38.305673 5922 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1201 06:43:38.305672 5922 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1201 06:43:38.305678 5922 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1201 06:43:38.305683 5922 handler.go:208] Removed *v1.Node event handler 2\\\\nI1201 06:43:38.305688 5922 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1201 06:43:38.305689 5922 handler.go:208] Removed *v1.Node event handler 7\\\\nI1201 06:43:38.305695 5922 handler.go:208] Removed *v1.NetworkPolicy 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e387aea0c4554e79d35c792619e3f2ec595730ca940ba84c1897bad221b960c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:43:39Z\\\",\\\"message\\\":\\\"Service_default/kubernetes_TCP_node_switch_crc\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"default/kubernetes\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.1\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{services.Addr{IP:\\\\\\\"192.168.126.11\\\\\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string{\\\\\\\"crc\\\\\\\"}, Routers:[]string(nil), Groups:[]string(nil)}}\\\\nF1201 06:43:39.481179 6053 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to 
verify\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.772124 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b8
9c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.780324 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.787950 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.793287 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.793316 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.793325 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.793336 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.793345 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:40Z","lastTransitionTime":"2025-12-01T06:43:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.798662 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.809032 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.819242 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.826718 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.835792 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.843116 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.851756 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.860080 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.870017 4632 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.880686 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3608e4738e1a29dbcc71944454085dcb6d9db81026a72449941372b7d11c912f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.883060 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gklnd_ac685f74-ea0b-4a05-8018-a68fc1df20cc/ovnkube-controller/1.log" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.886479 4632 scope.go:117] "RemoveContainer" 
containerID="7e387aea0c4554e79d35c792619e3f2ec595730ca940ba84c1897bad221b960c" Dec 01 06:43:40 crc kubenswrapper[4632]: E1201 06:43:40.886680 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-gklnd_openshift-ovn-kubernetes(ac685f74-ea0b-4a05-8018-a68fc1df20cc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.895491 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.895550 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.895560 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.895575 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.895586 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:40Z","lastTransitionTime":"2025-12-01T06:43:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.895605 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.903027 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.909816 4632 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.919273 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3608e4738e1a29dbcc71944454085dcb6d9db81026a72449941372b7d11c912f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.932524 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e387aea0c4554e79d35c792619e3f2ec595730ca940ba84c1897bad221b960c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e387aea0c4554e79d35c792619e3f2ec595730ca940ba84c1897bad221b960c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:43:39Z\\\",\\\"message\\\":\\\"Service_default/kubernetes_TCP_node_switch_crc\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"default/kubernetes\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.1\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{services.Addr{IP:\\\\\\\"192.168.126.11\\\\\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string{\\\\\\\"crc\\\\\\\"}, Routers:[]string(nil), Groups:[]string(nil)}}\\\\nF1201 06:43:39.481179 6053 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-gklnd_openshift-ovn-kubernetes(ac685f74-ea0b-4a05-8018-a68fc1df20cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.942677 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.951290 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.959200 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.967304 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.975081 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.983895 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.992430 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.
126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.994872 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.997424 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.997475 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.997486 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.997499 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:40 crc kubenswrapper[4632]: I1201 06:43:40.997509 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:40Z","lastTransitionTime":"2025-12-01T06:43:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.001066 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.009168 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:41Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.098902 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.098932 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.098941 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.098954 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.098963 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:41Z","lastTransitionTime":"2025-12-01T06:43:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.200660 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.200681 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.200689 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.200698 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.200705 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:41Z","lastTransitionTime":"2025-12-01T06:43:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.303063 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.303094 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.303103 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.303126 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.303136 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:41Z","lastTransitionTime":"2025-12-01T06:43:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.404540 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.404570 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.404579 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.404589 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.404598 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:41Z","lastTransitionTime":"2025-12-01T06:43:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.506344 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.506396 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.506404 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.506416 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.506425 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:41Z","lastTransitionTime":"2025-12-01T06:43:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.607898 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.607953 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.607962 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.607976 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.607993 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:41Z","lastTransitionTime":"2025-12-01T06:43:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.709758 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.709788 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.709797 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.709811 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.709818 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:41Z","lastTransitionTime":"2025-12-01T06:43:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.811924 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.811974 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.811995 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.812007 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.812015 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:41Z","lastTransitionTime":"2025-12-01T06:43:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.888925 4632 scope.go:117] "RemoveContainer" containerID="7e387aea0c4554e79d35c792619e3f2ec595730ca940ba84c1897bad221b960c" Dec 01 06:43:41 crc kubenswrapper[4632]: E1201 06:43:41.889142 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-gklnd_openshift-ovn-kubernetes(ac685f74-ea0b-4a05-8018-a68fc1df20cc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.914308 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.914338 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.914346 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.914374 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.914384 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:41Z","lastTransitionTime":"2025-12-01T06:43:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.941987 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9"] Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.942333 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.943756 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.944781 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.951548 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:41Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.958866 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:41Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.966687 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:41Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4jmb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:41Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.976526 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserve
r-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:41Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.983738 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:41Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:41 crc kubenswrapper[4632]: I1201 06:43:41.991662 4632 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:41Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.001516 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3608e4738e1a29dbcc71944454085dcb6d9db81026a72449941372b7d11c912f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:42Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.010505 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:42Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.016048 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.016072 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.016081 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.016097 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.016106 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:42Z","lastTransitionTime":"2025-12-01T06:43:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.021644 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:42Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.029756 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:42Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.038120 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:42Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.046310 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:42Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.054507 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:42Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.066786 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e387aea0c4554e79d35c792619e3f2ec595730c
a940ba84c1897bad221b960c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e387aea0c4554e79d35c792619e3f2ec595730ca940ba84c1897bad221b960c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:43:39Z\\\",\\\"message\\\":\\\"Service_default/kubernetes_TCP_node_switch_crc\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"default/kubernetes\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.1\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{services.Addr{IP:\\\\\\\"192.168.126.11\\\\\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string{\\\\\\\"crc\\\\\\\"}, Routers:[]string(nil), Groups:[]string(nil)}}\\\\nF1201 06:43:39.481179 6053 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-gklnd_openshift-ovn-kubernetes(ac685f74-ea0b-4a05-8018-a68fc1df20cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:42Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.073646 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:42Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.108746 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5b835e6d-98fc-4bfb-bb49-a463c40c06f9-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-4jmb9\" (UID: \"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.108880 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zf6z5\" (UniqueName: \"kubernetes.io/projected/5b835e6d-98fc-4bfb-bb49-a463c40c06f9-kube-api-access-zf6z5\") pod \"ovnkube-control-plane-749d76644c-4jmb9\" (UID: \"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.108925 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5b835e6d-98fc-4bfb-bb49-a463c40c06f9-env-overrides\") pod \"ovnkube-control-plane-749d76644c-4jmb9\" (UID: \"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.109002 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5b835e6d-98fc-4bfb-bb49-a463c40c06f9-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-4jmb9\" (UID: \"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.117929 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.117958 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.117966 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.117989 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.117999 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:42Z","lastTransitionTime":"2025-12-01T06:43:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns 
error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.210428 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zf6z5\" (UniqueName: \"kubernetes.io/projected/5b835e6d-98fc-4bfb-bb49-a463c40c06f9-kube-api-access-zf6z5\") pod \"ovnkube-control-plane-749d76644c-4jmb9\" (UID: \"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.210458 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5b835e6d-98fc-4bfb-bb49-a463c40c06f9-env-overrides\") pod \"ovnkube-control-plane-749d76644c-4jmb9\" (UID: \"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.210491 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5b835e6d-98fc-4bfb-bb49-a463c40c06f9-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-4jmb9\" (UID: \"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.210510 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5b835e6d-98fc-4bfb-bb49-a463c40c06f9-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-4jmb9\" (UID: \"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.211055 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/5b835e6d-98fc-4bfb-bb49-a463c40c06f9-env-overrides\") pod \"ovnkube-control-plane-749d76644c-4jmb9\" (UID: \"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.211080 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/5b835e6d-98fc-4bfb-bb49-a463c40c06f9-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-4jmb9\" (UID: \"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.214604 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/5b835e6d-98fc-4bfb-bb49-a463c40c06f9-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-4jmb9\" (UID: \"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.219748 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.219834 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.219913 4632 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.219973 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.220037 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:42Z","lastTransitionTime":"2025-12-01T06:43:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.223939 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zf6z5\" (UniqueName: \"kubernetes.io/projected/5b835e6d-98fc-4bfb-bb49-a463c40c06f9-kube-api-access-zf6z5\") pod \"ovnkube-control-plane-749d76644c-4jmb9\" (UID: \"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.251974 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" Dec 01 06:43:42 crc kubenswrapper[4632]: W1201 06:43:42.263221 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5b835e6d_98fc_4bfb_bb49_a463c40c06f9.slice/crio-f34e4bb182646a8803654dbe752c57837d7d30e7b294ddcf76204bf501a49c54 WatchSource:0}: Error finding container f34e4bb182646a8803654dbe752c57837d7d30e7b294ddcf76204bf501a49c54: Status 404 returned error can't find the container with id f34e4bb182646a8803654dbe752c57837d7d30e7b294ddcf76204bf501a49c54 Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.272734 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.272823 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.272880 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.272943 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.273009 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:42Z","lastTransitionTime":"2025-12-01T06:43:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:42 crc kubenswrapper[4632]: E1201 06:43:42.281887 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:42Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.284855 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.284885 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.284895 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.284909 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.284918 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:42Z","lastTransitionTime":"2025-12-01T06:43:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:42 crc kubenswrapper[4632]: E1201 06:43:42.296789 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:42Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.299213 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.299245 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.299255 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.299269 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.299277 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:42Z","lastTransitionTime":"2025-12-01T06:43:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:42 crc kubenswrapper[4632]: E1201 06:43:42.307038 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:42Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.309239 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.309264 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.309273 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.309286 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.309295 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:42Z","lastTransitionTime":"2025-12-01T06:43:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:42 crc kubenswrapper[4632]: E1201 06:43:42.316847 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:42Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.319063 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.319094 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.319104 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.319116 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.319126 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:42Z","lastTransitionTime":"2025-12-01T06:43:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:42 crc kubenswrapper[4632]: E1201 06:43:42.327023 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:42Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:42 crc kubenswrapper[4632]: E1201 06:43:42.327135 4632 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.328156 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.328183 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.328193 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.328205 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.328214 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:42Z","lastTransitionTime":"2025-12-01T06:43:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.430328 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.430584 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.430592 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.430607 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.430617 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:42Z","lastTransitionTime":"2025-12-01T06:43:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.532698 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.532727 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.532736 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.532750 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.532758 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:42Z","lastTransitionTime":"2025-12-01T06:43:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.634927 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.634957 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.634965 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.634976 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.635010 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:42Z","lastTransitionTime":"2025-12-01T06:43:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.737424 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.737460 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.737468 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.737481 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.737492 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:42Z","lastTransitionTime":"2025-12-01T06:43:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.749795 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.749902 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:43:42 crc kubenswrapper[4632]: E1201 06:43:42.749909 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.749811 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:43:42 crc kubenswrapper[4632]: E1201 06:43:42.750067 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:43:42 crc kubenswrapper[4632]: E1201 06:43:42.750135 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.839435 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.839464 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.839472 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.839484 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.839499 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:42Z","lastTransitionTime":"2025-12-01T06:43:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.891398 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" event={"ID":"5b835e6d-98fc-4bfb-bb49-a463c40c06f9","Type":"ContainerStarted","Data":"88c81cae08e3eca8176795f0b04f2e58b9ba37d3ae27c02a357fbb5a2c07c019"} Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.891432 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" event={"ID":"5b835e6d-98fc-4bfb-bb49-a463c40c06f9","Type":"ContainerStarted","Data":"1eeae6ad0d860894ab1c556177b18005e8a17808ee153fdce3798afe99fb1492"} Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.891459 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" event={"ID":"5b835e6d-98fc-4bfb-bb49-a463c40c06f9","Type":"ContainerStarted","Data":"f34e4bb182646a8803654dbe752c57837d7d30e7b294ddcf76204bf501a49c54"} Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.901418 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\
\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:42Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.909516 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly
\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:42Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.917498 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:42Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.933073 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3608e4738e1a29dbcc71944454085dcb6d9db81026a72449941372b7d11c912f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:42Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.941083 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.941111 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:42 crc 
kubenswrapper[4632]: I1201 06:43:42.941122 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.941135 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.941142 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:42Z","lastTransitionTime":"2025-12-01T06:43:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.941543 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:42Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.950322 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:42Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.962774 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e387aea0c4554e79d35c792619e3f2ec595730c
a940ba84c1897bad221b960c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e387aea0c4554e79d35c792619e3f2ec595730ca940ba84c1897bad221b960c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:43:39Z\\\",\\\"message\\\":\\\"Service_default/kubernetes_TCP_node_switch_crc\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"default/kubernetes\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.1\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{services.Addr{IP:\\\\\\\"192.168.126.11\\\\\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string{\\\\\\\"crc\\\\\\\"}, Routers:[]string(nil), Groups:[]string(nil)}}\\\\nF1201 06:43:39.481179 6053 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-gklnd_openshift-ovn-kubernetes(ac685f74-ea0b-4a05-8018-a68fc1df20cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:42Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.971567 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:42Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.980680 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:42Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.989166 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:42Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:42 crc kubenswrapper[4632]: I1201 06:43:42.997024 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:42Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.003850 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:43Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.011787 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:43Z is after 
2025-08-24T17:21:41Z" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.018076 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:43Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.025893 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eeae6ad0d860894ab1c556177b18005e8a17808ee153fdce3798afe99fb1492\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88c81cae08e3eca8176795f0b04f2e58b9ba37d3ae27c02a357fbb5a2c07c019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4jmb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:43Z is after 2025-08-24T17:21:41Z" Dec 01 
06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.043579 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.043620 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.043632 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.043646 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.043661 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:43Z","lastTransitionTime":"2025-12-01T06:43:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.145425 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.145458 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.145468 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.145481 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.145491 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:43Z","lastTransitionTime":"2025-12-01T06:43:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.246824 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.246860 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.246871 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.246886 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.246896 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:43Z","lastTransitionTime":"2025-12-01T06:43:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.349232 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.349266 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.349275 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.349288 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.349296 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:43Z","lastTransitionTime":"2025-12-01T06:43:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.451344 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.451394 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.451403 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.451416 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.451427 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:43Z","lastTransitionTime":"2025-12-01T06:43:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.553271 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.553305 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.553314 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.553326 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.553334 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:43Z","lastTransitionTime":"2025-12-01T06:43:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.655710 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.656095 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.656211 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.656322 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.656418 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:43Z","lastTransitionTime":"2025-12-01T06:43:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.688406 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-nqqbv"]
Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.688783 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv"
Dec 01 06:43:43 crc kubenswrapper[4632]: E1201 06:43:43.688838 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.701847 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"
name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:43Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.711115 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:43Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.719375 4632 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:43Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.729527 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3608e4738e1a29dbcc71944454085dcb6d9db81026a72449941372b7d11c912f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:43Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.737170 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqqbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"559abf1e-dc19-40e9-b75b-9a327d661dc0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqqbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:43Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.745821 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:43Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.758230 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.758256 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.758265 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.758278 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.758287 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:43Z","lastTransitionTime":"2025-12-01T06:43:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.759120 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e387aea0c4554e79d35c792619e3f2ec595730c
a940ba84c1897bad221b960c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e387aea0c4554e79d35c792619e3f2ec595730ca940ba84c1897bad221b960c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:43:39Z\\\",\\\"message\\\":\\\"Service_default/kubernetes_TCP_node_switch_crc\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"default/kubernetes\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.1\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{services.Addr{IP:\\\\\\\"192.168.126.11\\\\\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string{\\\\\\\"crc\\\\\\\"}, Routers:[]string(nil), Groups:[]string(nil)}}\\\\nF1201 06:43:39.481179 6053 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-gklnd_openshift-ovn-kubernetes(ac685f74-ea0b-4a05-8018-a68fc1df20cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:43Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.767223 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:43Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.776727 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:43Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.785448 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:43Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.793080 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:43Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.800729 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:43Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.806713 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:43Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.814346 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:43Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.820841 4632 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-49pcd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:43Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.825213 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmv4x\" (UniqueName: \"kubernetes.io/projected/559abf1e-dc19-40e9-b75b-9a327d661dc0-kube-api-access-qmv4x\") pod \"network-metrics-daemon-nqqbv\" (UID: \"559abf1e-dc19-40e9-b75b-9a327d661dc0\") " pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.825262 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/559abf1e-dc19-40e9-b75b-9a327d661dc0-metrics-certs\") pod \"network-metrics-daemon-nqqbv\" (UID: \"559abf1e-dc19-40e9-b75b-9a327d661dc0\") " pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.827584 4632 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eeae6ad0d860894ab1c556177b18005e8a17808ee153fdce3798afe99fb1492\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88c81cae08e3eca8176795f0b04f2e58b9ba37d3ae27c02a357fbb5a2c07c019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4jmb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-01T06:43:43Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.859946 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.859970 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.859991 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.860004 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.860014 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:43Z","lastTransitionTime":"2025-12-01T06:43:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.925756 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmv4x\" (UniqueName: \"kubernetes.io/projected/559abf1e-dc19-40e9-b75b-9a327d661dc0-kube-api-access-qmv4x\") pod \"network-metrics-daemon-nqqbv\" (UID: \"559abf1e-dc19-40e9-b75b-9a327d661dc0\") " pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.925794 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/559abf1e-dc19-40e9-b75b-9a327d661dc0-metrics-certs\") pod \"network-metrics-daemon-nqqbv\" (UID: \"559abf1e-dc19-40e9-b75b-9a327d661dc0\") " pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:43:43 crc kubenswrapper[4632]: E1201 06:43:43.925911 4632 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 06:43:43 crc kubenswrapper[4632]: E1201 06:43:43.925957 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/559abf1e-dc19-40e9-b75b-9a327d661dc0-metrics-certs podName:559abf1e-dc19-40e9-b75b-9a327d661dc0 nodeName:}" failed. No retries permitted until 2025-12-01 06:43:44.425945176 +0000 UTC m=+33.990958149 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/559abf1e-dc19-40e9-b75b-9a327d661dc0-metrics-certs") pod "network-metrics-daemon-nqqbv" (UID: "559abf1e-dc19-40e9-b75b-9a327d661dc0") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.938738 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmv4x\" (UniqueName: \"kubernetes.io/projected/559abf1e-dc19-40e9-b75b-9a327d661dc0-kube-api-access-qmv4x\") pod \"network-metrics-daemon-nqqbv\" (UID: \"559abf1e-dc19-40e9-b75b-9a327d661dc0\") " pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.961948 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.961999 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.962010 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.962027 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:43 crc kubenswrapper[4632]: I1201 06:43:43.962039 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:43Z","lastTransitionTime":"2025-12-01T06:43:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.063832 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.063864 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.063872 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.063885 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.063893 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:44Z","lastTransitionTime":"2025-12-01T06:43:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.165677 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.165714 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.165723 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.165737 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.165745 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:44Z","lastTransitionTime":"2025-12-01T06:43:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.267401 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.267441 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.267450 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.267462 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.267472 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:44Z","lastTransitionTime":"2025-12-01T06:43:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.369437 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.369472 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.369480 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.369493 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.369502 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:44Z","lastTransitionTime":"2025-12-01T06:43:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.430058 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/559abf1e-dc19-40e9-b75b-9a327d661dc0-metrics-certs\") pod \"network-metrics-daemon-nqqbv\" (UID: \"559abf1e-dc19-40e9-b75b-9a327d661dc0\") " pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:43:44 crc kubenswrapper[4632]: E1201 06:43:44.430179 4632 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 06:43:44 crc kubenswrapper[4632]: E1201 06:43:44.430229 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/559abf1e-dc19-40e9-b75b-9a327d661dc0-metrics-certs podName:559abf1e-dc19-40e9-b75b-9a327d661dc0 nodeName:}" failed. No retries permitted until 2025-12-01 06:43:45.43021761 +0000 UTC m=+34.995230582 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/559abf1e-dc19-40e9-b75b-9a327d661dc0-metrics-certs") pod "network-metrics-daemon-nqqbv" (UID: "559abf1e-dc19-40e9-b75b-9a327d661dc0") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.472036 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.472073 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.472081 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.472095 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.472104 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:44Z","lastTransitionTime":"2025-12-01T06:43:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.530529 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:43:44 crc kubenswrapper[4632]: E1201 06:43:44.530657 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:44:00.530640375 +0000 UTC m=+50.095653348 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.573887 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.573921 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.573931 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.573946 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.573958 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:44Z","lastTransitionTime":"2025-12-01T06:43:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.631623 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.631662 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.631684 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.631714 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:43:44 crc kubenswrapper[4632]: E1201 06:43:44.631783 4632 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object 
"openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:43:44 crc kubenswrapper[4632]: E1201 06:43:44.631813 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 06:43:44 crc kubenswrapper[4632]: E1201 06:43:44.631827 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:44:00.631815447 +0000 UTC m=+50.196828421 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:43:44 crc kubenswrapper[4632]: E1201 06:43:44.631828 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 06:43:44 crc kubenswrapper[4632]: E1201 06:43:44.631839 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 06:43:44 crc kubenswrapper[4632]: E1201 06:43:44.631860 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 06:43:44 crc kubenswrapper[4632]: E1201 06:43:44.631845 4632 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:43:44 crc kubenswrapper[4632]: E1201 06:43:44.631873 4632 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 06:43:44 crc kubenswrapper[4632]: E1201 06:43:44.632135 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:44:00.632114722 +0000 UTC m=+50.197127705 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 06:43:44 crc kubenswrapper[4632]: E1201 06:43:44.631871 4632 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:43:44 crc kubenswrapper[4632]: E1201 06:43:44.632225 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 06:44:00.632205662 +0000 UTC m=+50.197218635 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:43:44 crc kubenswrapper[4632]: E1201 06:43:44.632247 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 06:44:00.632239267 +0000 UTC m=+50.197252250 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.675688 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.675724 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.675733 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.675747 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.675756 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:44Z","lastTransitionTime":"2025-12-01T06:43:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.749992 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.750106 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.750169 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:43:44 crc kubenswrapper[4632]: E1201 06:43:44.750131 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:43:44 crc kubenswrapper[4632]: E1201 06:43:44.750225 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:43:44 crc kubenswrapper[4632]: E1201 06:43:44.750301 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.777735 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.777771 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.777779 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.777793 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.777802 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:44Z","lastTransitionTime":"2025-12-01T06:43:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.879951 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.880001 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.880013 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.880026 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.880034 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:44Z","lastTransitionTime":"2025-12-01T06:43:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.982553 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.982596 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.982606 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.982621 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:44 crc kubenswrapper[4632]: I1201 06:43:44.982630 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:44Z","lastTransitionTime":"2025-12-01T06:43:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.084574 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.084609 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.084617 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.084629 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.084638 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:45Z","lastTransitionTime":"2025-12-01T06:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.186191 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.186225 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.186235 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.186248 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.186258 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:45Z","lastTransitionTime":"2025-12-01T06:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.287862 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.287900 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.287908 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.287923 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.287932 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:45Z","lastTransitionTime":"2025-12-01T06:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.390131 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.390166 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.390177 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.390192 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.390201 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:45Z","lastTransitionTime":"2025-12-01T06:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.438825 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/559abf1e-dc19-40e9-b75b-9a327d661dc0-metrics-certs\") pod \"network-metrics-daemon-nqqbv\" (UID: \"559abf1e-dc19-40e9-b75b-9a327d661dc0\") " pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:43:45 crc kubenswrapper[4632]: E1201 06:43:45.438997 4632 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 06:43:45 crc kubenswrapper[4632]: E1201 06:43:45.439054 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/559abf1e-dc19-40e9-b75b-9a327d661dc0-metrics-certs podName:559abf1e-dc19-40e9-b75b-9a327d661dc0 nodeName:}" failed. No retries permitted until 2025-12-01 06:43:47.439040551 +0000 UTC m=+37.004053525 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/559abf1e-dc19-40e9-b75b-9a327d661dc0-metrics-certs") pod "network-metrics-daemon-nqqbv" (UID: "559abf1e-dc19-40e9-b75b-9a327d661dc0") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.492373 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.492399 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.492407 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.492419 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.492429 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:45Z","lastTransitionTime":"2025-12-01T06:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.594198 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.594234 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.594246 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.594258 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.594265 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:45Z","lastTransitionTime":"2025-12-01T06:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.696383 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.696418 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.696428 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.696441 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.696451 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:45Z","lastTransitionTime":"2025-12-01T06:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.750068 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:43:45 crc kubenswrapper[4632]: E1201 06:43:45.750169 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.798820 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.798850 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.798859 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.798871 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.798879 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:45Z","lastTransitionTime":"2025-12-01T06:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.901158 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.901191 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.901199 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.901210 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:45 crc kubenswrapper[4632]: I1201 06:43:45.901219 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:45Z","lastTransitionTime":"2025-12-01T06:43:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.002711 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.002747 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.002756 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.002768 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.002777 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:46Z","lastTransitionTime":"2025-12-01T06:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.104581 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.104615 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.104624 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.104638 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.104646 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:46Z","lastTransitionTime":"2025-12-01T06:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.206840 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.206874 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.206883 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.206897 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.206906 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:46Z","lastTransitionTime":"2025-12-01T06:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.308199 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.308232 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.308240 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.308252 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.308261 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:46Z","lastTransitionTime":"2025-12-01T06:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.409542 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.409571 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.409580 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.409590 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.409597 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:46Z","lastTransitionTime":"2025-12-01T06:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.511211 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.511251 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.511261 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.511275 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.511284 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:46Z","lastTransitionTime":"2025-12-01T06:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.612884 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.612917 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.612925 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.612936 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.612944 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:46Z","lastTransitionTime":"2025-12-01T06:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.714589 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.714628 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.714636 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.714649 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.714657 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:46Z","lastTransitionTime":"2025-12-01T06:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.750023 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.750080 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:43:46 crc kubenswrapper[4632]: E1201 06:43:46.750120 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:43:46 crc kubenswrapper[4632]: E1201 06:43:46.750170 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.750243 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:43:46 crc kubenswrapper[4632]: E1201 06:43:46.750559 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.816012 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.816050 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.816059 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.816071 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.816080 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:46Z","lastTransitionTime":"2025-12-01T06:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.918376 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.918415 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.918426 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.918439 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:46 crc kubenswrapper[4632]: I1201 06:43:46.918448 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:46Z","lastTransitionTime":"2025-12-01T06:43:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.020333 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.020452 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.020465 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.020493 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.020509 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:47Z","lastTransitionTime":"2025-12-01T06:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.123079 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.123127 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.123138 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.123154 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.123164 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:47Z","lastTransitionTime":"2025-12-01T06:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.225468 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.225501 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.225510 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.225521 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.225529 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:47Z","lastTransitionTime":"2025-12-01T06:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.327213 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.327256 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.327265 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.327279 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.327288 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:47Z","lastTransitionTime":"2025-12-01T06:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.428758 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.428808 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.428819 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.428832 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.428840 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:47Z","lastTransitionTime":"2025-12-01T06:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.457798 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/559abf1e-dc19-40e9-b75b-9a327d661dc0-metrics-certs\") pod \"network-metrics-daemon-nqqbv\" (UID: \"559abf1e-dc19-40e9-b75b-9a327d661dc0\") " pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:43:47 crc kubenswrapper[4632]: E1201 06:43:47.457942 4632 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 06:43:47 crc kubenswrapper[4632]: E1201 06:43:47.458018 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/559abf1e-dc19-40e9-b75b-9a327d661dc0-metrics-certs podName:559abf1e-dc19-40e9-b75b-9a327d661dc0 nodeName:}" failed. No retries permitted until 2025-12-01 06:43:51.457998886 +0000 UTC m=+41.023011849 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/559abf1e-dc19-40e9-b75b-9a327d661dc0-metrics-certs") pod "network-metrics-daemon-nqqbv" (UID: "559abf1e-dc19-40e9-b75b-9a327d661dc0") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.530758 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.530839 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.530863 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.530876 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.530885 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:47Z","lastTransitionTime":"2025-12-01T06:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.633073 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.633102 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.633111 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.633139 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.633171 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:47Z","lastTransitionTime":"2025-12-01T06:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.735434 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.735485 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.735499 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.735516 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.735527 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:47Z","lastTransitionTime":"2025-12-01T06:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.749727 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:43:47 crc kubenswrapper[4632]: E1201 06:43:47.749825 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.836798 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.836827 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.836834 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.836851 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.836860 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:47Z","lastTransitionTime":"2025-12-01T06:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.938141 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.938467 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.938567 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.938655 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:47 crc kubenswrapper[4632]: I1201 06:43:47.938757 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:47Z","lastTransitionTime":"2025-12-01T06:43:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.040658 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.040696 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.040706 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.040721 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.040731 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:48Z","lastTransitionTime":"2025-12-01T06:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.142563 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.142598 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.142606 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.142620 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.142628 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:48Z","lastTransitionTime":"2025-12-01T06:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.244724 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.244760 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.244769 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.244782 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.244791 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:48Z","lastTransitionTime":"2025-12-01T06:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.347255 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.347294 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.347303 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.347316 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.347328 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:48Z","lastTransitionTime":"2025-12-01T06:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.449032 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.449064 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.449072 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.449084 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.449092 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:48Z","lastTransitionTime":"2025-12-01T06:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.551099 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.551135 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.551149 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.551162 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.551171 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:48Z","lastTransitionTime":"2025-12-01T06:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.653405 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.653578 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.653653 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.653747 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.653808 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:48Z","lastTransitionTime":"2025-12-01T06:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.750274 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.750279 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:43:48 crc kubenswrapper[4632]: E1201 06:43:48.750434 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.750465 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:43:48 crc kubenswrapper[4632]: E1201 06:43:48.750612 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:43:48 crc kubenswrapper[4632]: E1201 06:43:48.750695 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.755115 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.755156 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.755169 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.755183 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.755193 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:48Z","lastTransitionTime":"2025-12-01T06:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.857621 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.857742 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.857820 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.857892 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.858673 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:48Z","lastTransitionTime":"2025-12-01T06:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.960973 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.961039 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.961049 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.961063 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:48 crc kubenswrapper[4632]: I1201 06:43:48.961093 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:48Z","lastTransitionTime":"2025-12-01T06:43:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.063276 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.063321 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.063333 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.063378 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.063391 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:49Z","lastTransitionTime":"2025-12-01T06:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.165651 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.165713 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.165726 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.165742 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.165751 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:49Z","lastTransitionTime":"2025-12-01T06:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.267903 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.267949 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.267960 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.267977 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.268001 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:49Z","lastTransitionTime":"2025-12-01T06:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.369781 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.369820 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.369830 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.369841 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.369850 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:49Z","lastTransitionTime":"2025-12-01T06:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.471655 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.471692 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.471701 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.471714 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.471724 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:49Z","lastTransitionTime":"2025-12-01T06:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.573452 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.573510 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.573523 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.573541 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.573552 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:49Z","lastTransitionTime":"2025-12-01T06:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.675801 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.675851 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.675866 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.675884 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.675897 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:49Z","lastTransitionTime":"2025-12-01T06:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.749682 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:43:49 crc kubenswrapper[4632]: E1201 06:43:49.749811 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.777811 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.777850 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.777859 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.777872 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.777883 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:49Z","lastTransitionTime":"2025-12-01T06:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.879662 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.879690 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.879698 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.879712 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.879721 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:49Z","lastTransitionTime":"2025-12-01T06:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.981711 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.981743 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.981752 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.981766 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:49 crc kubenswrapper[4632]: I1201 06:43:49.981774 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:49Z","lastTransitionTime":"2025-12-01T06:43:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.083471 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.083509 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.083536 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.083549 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.083558 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:50Z","lastTransitionTime":"2025-12-01T06:43:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.185064 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.185096 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.185104 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.185114 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.185122 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:50Z","lastTransitionTime":"2025-12-01T06:43:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.286835 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.286864 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.286872 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.286883 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.286891 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:50Z","lastTransitionTime":"2025-12-01T06:43:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.388583 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.388615 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.388624 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.388637 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.388646 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:50Z","lastTransitionTime":"2025-12-01T06:43:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.490586 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.490619 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.490627 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.490640 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.490650 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:50Z","lastTransitionTime":"2025-12-01T06:43:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.592409 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.592445 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.592452 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.592466 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.592475 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:50Z","lastTransitionTime":"2025-12-01T06:43:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.693894 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.693916 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.693925 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.693936 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.693944 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:50Z","lastTransitionTime":"2025-12-01T06:43:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.750058 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:43:50 crc kubenswrapper[4632]: E1201 06:43:50.750141 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.750205 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.750275 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:43:50 crc kubenswrapper[4632]: E1201 06:43:50.750292 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:43:50 crc kubenswrapper[4632]: E1201 06:43:50.750487 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.759470 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqqbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"559abf1e-dc19-40e9-b75b-9a327d661dc0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqqbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:50Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.766659 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:50Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.778177 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3608e4738e1a29dbcc71944454085dcb6d9db81026a72449941372b7d11c912f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:50Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.788097 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:50Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.795869 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.795900 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.795912 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.795926 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.795936 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:50Z","lastTransitionTime":"2025-12-01T06:43:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.796499 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:50Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.804007 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:50Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.813937 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:50Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.823515 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:50Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.836146 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e387aea0c4554e79d35c792619e3f2ec595730c
a940ba84c1897bad221b960c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e387aea0c4554e79d35c792619e3f2ec595730ca940ba84c1897bad221b960c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:43:39Z\\\",\\\"message\\\":\\\"Service_default/kubernetes_TCP_node_switch_crc\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"default/kubernetes\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.1\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{services.Addr{IP:\\\\\\\"192.168.126.11\\\\\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string{\\\\\\\"crc\\\\\\\"}, Routers:[]string(nil), Groups:[]string(nil)}}\\\\nF1201 06:43:39.481179 6053 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-gklnd_openshift-ovn-kubernetes(ac685f74-ea0b-4a05-8018-a68fc1df20cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:50Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.844524 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:50Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.851802 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:50Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.859727 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:50Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.868100 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:50Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.875384 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eeae6ad0d860894ab1c556177b18005e8a17808ee153fdce3798afe99fb1492\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88c81cae08e3eca8176795f0b04f2e58b9ba37d3ae27c02a357fbb5a2c07c019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4jmb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:50Z is after 2025-08-24T17:21:41Z" Dec 01 
06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.883612 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:50Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.890322 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:50Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.897513 4632 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.897555 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.897565 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.897577 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.897586 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:50Z","lastTransitionTime":"2025-12-01T06:43:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.999500 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.999538 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.999548 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.999561 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:50 crc kubenswrapper[4632]: I1201 06:43:50.999570 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:50Z","lastTransitionTime":"2025-12-01T06:43:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.101166 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.101194 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.101203 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.101217 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.101226 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:51Z","lastTransitionTime":"2025-12-01T06:43:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.202683 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.202715 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.202723 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.202735 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.202743 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:51Z","lastTransitionTime":"2025-12-01T06:43:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.304582 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.304615 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.304623 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.304637 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.304645 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:51Z","lastTransitionTime":"2025-12-01T06:43:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.406071 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.406106 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.406115 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.406129 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.406139 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:51Z","lastTransitionTime":"2025-12-01T06:43:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.492102 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/559abf1e-dc19-40e9-b75b-9a327d661dc0-metrics-certs\") pod \"network-metrics-daemon-nqqbv\" (UID: \"559abf1e-dc19-40e9-b75b-9a327d661dc0\") " pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:43:51 crc kubenswrapper[4632]: E1201 06:43:51.492213 4632 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 06:43:51 crc kubenswrapper[4632]: E1201 06:43:51.492278 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/559abf1e-dc19-40e9-b75b-9a327d661dc0-metrics-certs podName:559abf1e-dc19-40e9-b75b-9a327d661dc0 nodeName:}" failed. No retries permitted until 2025-12-01 06:43:59.492261694 +0000 UTC m=+49.057274667 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/559abf1e-dc19-40e9-b75b-9a327d661dc0-metrics-certs") pod "network-metrics-daemon-nqqbv" (UID: "559abf1e-dc19-40e9-b75b-9a327d661dc0") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.508020 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.508051 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.508062 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.508075 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.508084 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:51Z","lastTransitionTime":"2025-12-01T06:43:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.610009 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.610038 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.610047 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.610059 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.610069 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:51Z","lastTransitionTime":"2025-12-01T06:43:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.712004 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.712215 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.712293 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.712377 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.712453 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:51Z","lastTransitionTime":"2025-12-01T06:43:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.749488 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:43:51 crc kubenswrapper[4632]: E1201 06:43:51.749587 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.814178 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.814389 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.814463 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.814527 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.814592 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:51Z","lastTransitionTime":"2025-12-01T06:43:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.916825 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.916888 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.916897 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.916911 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:51 crc kubenswrapper[4632]: I1201 06:43:51.916921 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:51Z","lastTransitionTime":"2025-12-01T06:43:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.018583 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.018614 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.018624 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.018636 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.018644 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:52Z","lastTransitionTime":"2025-12-01T06:43:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.120948 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.120998 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.121009 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.121025 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.121035 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:52Z","lastTransitionTime":"2025-12-01T06:43:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.222482 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.222566 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.222579 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.222607 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.222622 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:52Z","lastTransitionTime":"2025-12-01T06:43:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.324494 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.324546 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.324557 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.324570 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.324579 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:52Z","lastTransitionTime":"2025-12-01T06:43:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.426427 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.426457 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.426465 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.426477 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.426486 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:52Z","lastTransitionTime":"2025-12-01T06:43:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.527882 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.527913 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.527921 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.527933 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.527940 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:52Z","lastTransitionTime":"2025-12-01T06:43:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.532715 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.532734 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.532743 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.532751 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.532758 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:52Z","lastTransitionTime":"2025-12-01T06:43:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:52 crc kubenswrapper[4632]: E1201 06:43:52.541348 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:52Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.543560 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.543594 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.543603 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.543616 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.543624 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:52Z","lastTransitionTime":"2025-12-01T06:43:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:52 crc kubenswrapper[4632]: E1201 06:43:52.551145 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:52Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.553453 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.553478 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.553486 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.553497 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.553505 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:52Z","lastTransitionTime":"2025-12-01T06:43:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:52 crc kubenswrapper[4632]: E1201 06:43:52.562411 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:52Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.564452 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.564481 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.564491 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.564518 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.564525 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:52Z","lastTransitionTime":"2025-12-01T06:43:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:52 crc kubenswrapper[4632]: E1201 06:43:52.572241 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:52Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.574384 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.574444 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.574455 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.574466 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.574474 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:52Z","lastTransitionTime":"2025-12-01T06:43:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:52 crc kubenswrapper[4632]: E1201 06:43:52.582612 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:52Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:52Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:52 crc kubenswrapper[4632]: E1201 06:43:52.582711 4632 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.629096 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.629121 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.629129 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.629140 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.629147 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:52Z","lastTransitionTime":"2025-12-01T06:43:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.730469 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.730504 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.730513 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.730524 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.730531 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:52Z","lastTransitionTime":"2025-12-01T06:43:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.749877 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.749892 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.749938 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:43:52 crc kubenswrapper[4632]: E1201 06:43:52.750046 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:43:52 crc kubenswrapper[4632]: E1201 06:43:52.750132 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:43:52 crc kubenswrapper[4632]: E1201 06:43:52.750186 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.832215 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.832251 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.832261 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.832273 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.832297 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:52Z","lastTransitionTime":"2025-12-01T06:43:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.934126 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.934157 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.934166 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.934176 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:52 crc kubenswrapper[4632]: I1201 06:43:52.934184 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:52Z","lastTransitionTime":"2025-12-01T06:43:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.035974 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.036053 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.036063 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.036075 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.036084 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:53Z","lastTransitionTime":"2025-12-01T06:43:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.137968 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.138021 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.138030 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.138043 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.138050 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:53Z","lastTransitionTime":"2025-12-01T06:43:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.239327 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.239371 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.239379 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.239391 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.239400 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:53Z","lastTransitionTime":"2025-12-01T06:43:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.341322 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.341383 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.341393 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.341407 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.341415 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:53Z","lastTransitionTime":"2025-12-01T06:43:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.443694 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.443731 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.443739 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.443752 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.443760 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:53Z","lastTransitionTime":"2025-12-01T06:43:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.545201 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.545237 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.545246 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.545259 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.545267 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:53Z","lastTransitionTime":"2025-12-01T06:43:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.646996 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.647043 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.647054 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.647072 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.647084 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:53Z","lastTransitionTime":"2025-12-01T06:43:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.749374 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.749410 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.749419 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.749431 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.749439 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:53Z","lastTransitionTime":"2025-12-01T06:43:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.749414 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:43:53 crc kubenswrapper[4632]: E1201 06:43:53.749622 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.851276 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.851304 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.851313 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.851322 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.851329 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:53Z","lastTransitionTime":"2025-12-01T06:43:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.953459 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.953485 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.953493 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.953502 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:53 crc kubenswrapper[4632]: I1201 06:43:53.953509 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:53Z","lastTransitionTime":"2025-12-01T06:43:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.055175 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.055232 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.055246 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.055262 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.055274 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:54Z","lastTransitionTime":"2025-12-01T06:43:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.157284 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.157322 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.157333 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.157347 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.157372 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:54Z","lastTransitionTime":"2025-12-01T06:43:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.259197 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.259227 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.259235 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.259248 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.259257 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:54Z","lastTransitionTime":"2025-12-01T06:43:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.360433 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.360466 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.360475 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.360488 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.360496 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:54Z","lastTransitionTime":"2025-12-01T06:43:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.462629 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.462662 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.462671 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.462684 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.462693 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:54Z","lastTransitionTime":"2025-12-01T06:43:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.564383 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.564413 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.564443 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.564453 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.564461 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:54Z","lastTransitionTime":"2025-12-01T06:43:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.665783 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.665818 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.665827 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.665839 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.665847 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:54Z","lastTransitionTime":"2025-12-01T06:43:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.749319 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.749415 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:43:54 crc kubenswrapper[4632]: E1201 06:43:54.749512 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.749570 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:43:54 crc kubenswrapper[4632]: E1201 06:43:54.749612 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:43:54 crc kubenswrapper[4632]: E1201 06:43:54.749686 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.767525 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.767558 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.767570 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.767581 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.767593 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:54Z","lastTransitionTime":"2025-12-01T06:43:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.869672 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.869894 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.869903 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.869915 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.869924 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:54Z","lastTransitionTime":"2025-12-01T06:43:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.971622 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.971653 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.971662 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.971674 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:54 crc kubenswrapper[4632]: I1201 06:43:54.971681 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:54Z","lastTransitionTime":"2025-12-01T06:43:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.073253 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.073293 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.073301 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.073314 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.073324 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:55Z","lastTransitionTime":"2025-12-01T06:43:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.175627 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.175663 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.175672 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.175687 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.175698 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:55Z","lastTransitionTime":"2025-12-01T06:43:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.277241 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.277276 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.277284 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.277297 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.277306 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:55Z","lastTransitionTime":"2025-12-01T06:43:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.379366 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.379401 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.379409 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.379422 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.379442 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:55Z","lastTransitionTime":"2025-12-01T06:43:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.481070 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.481107 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.481116 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.481129 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.481138 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:55Z","lastTransitionTime":"2025-12-01T06:43:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.583062 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.583096 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.583105 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.583117 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.583139 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:55Z","lastTransitionTime":"2025-12-01T06:43:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.685644 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.685694 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.685703 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.685715 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.685723 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:55Z","lastTransitionTime":"2025-12-01T06:43:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.749542 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:43:55 crc kubenswrapper[4632]: E1201 06:43:55.749633 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.787687 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.787725 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.787736 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.787750 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.787761 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:55Z","lastTransitionTime":"2025-12-01T06:43:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.889808 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.889852 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.889862 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.889875 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.889885 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:55Z","lastTransitionTime":"2025-12-01T06:43:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.991430 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.991480 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.991488 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.991502 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:55 crc kubenswrapper[4632]: I1201 06:43:55.991509 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:55Z","lastTransitionTime":"2025-12-01T06:43:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.093784 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.093826 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.093838 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.093852 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.093865 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:56Z","lastTransitionTime":"2025-12-01T06:43:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.196078 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.196119 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.196130 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.196151 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.196160 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:56Z","lastTransitionTime":"2025-12-01T06:43:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.298113 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.298144 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.298152 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.298164 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.298172 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:56Z","lastTransitionTime":"2025-12-01T06:43:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.399753 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.399778 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.399787 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.399798 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.399806 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:56Z","lastTransitionTime":"2025-12-01T06:43:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.501698 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.501729 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.501737 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.501750 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.501759 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:56Z","lastTransitionTime":"2025-12-01T06:43:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.603459 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.603496 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.603505 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.603517 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.603528 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:56Z","lastTransitionTime":"2025-12-01T06:43:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.705344 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.705397 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.705406 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.705419 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.705428 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:56Z","lastTransitionTime":"2025-12-01T06:43:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.749925 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.749961 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:43:56 crc kubenswrapper[4632]: E1201 06:43:56.750055 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.750090 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:43:56 crc kubenswrapper[4632]: E1201 06:43:56.750186 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:43:56 crc kubenswrapper[4632]: E1201 06:43:56.750478 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.750651 4632 scope.go:117] "RemoveContainer" containerID="7e387aea0c4554e79d35c792619e3f2ec595730ca940ba84c1897bad221b960c" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.806691 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.806721 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.806729 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.806742 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.806752 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:56Z","lastTransitionTime":"2025-12-01T06:43:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.909112 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.909149 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.909160 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.909173 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.909183 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:56Z","lastTransitionTime":"2025-12-01T06:43:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.922580 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gklnd_ac685f74-ea0b-4a05-8018-a68fc1df20cc/ovnkube-controller/1.log" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.924835 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" event={"ID":"ac685f74-ea0b-4a05-8018-a68fc1df20cc","Type":"ContainerStarted","Data":"f45ea64636d1f9306f4f96cafdecc3b9798d2adfd6456a434808601bdb1f3d2b"} Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.925164 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.935774 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:56Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.944765 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:56Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.953923 4632 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-nqqbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"559abf1e-dc19-40e9-b75b-9a327d661dc0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqqbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:56Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.965515 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:56Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.977000 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3608e4738e1a29dbcc71944454085dcb6d9db81026a72449941372b7d11c912f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:56Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.985904 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:56Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:56 crc kubenswrapper[4632]: I1201 06:43:56.999148 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:56Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.009544 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:57Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.010651 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.010753 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.010832 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.010897 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.010952 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:57Z","lastTransitionTime":"2025-12-01T06:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.019455 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:57Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.036263 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:57Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.048816 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9592c
086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ea64636d1f9306f4f96cafdecc3b9798d2adfd6456a434808601bdb1f3d2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e387aea0c4554e79d35c792619e3f2ec595730ca940ba84c1897bad221b960c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:43:39Z\\\",\\\"message\\\":\\\"Service_default/kubernetes_TCP_node_switch_crc\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"default/kubernetes\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.1\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{services.Addr{IP:\\\\\\\"192.168.126.11\\\\\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string{\\\\\\\"crc\\\\\\\"}, Routers:[]string(nil), Groups:[]string(nil)}}\\\\nF1201 06:43:39.481179 6053 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post 
\\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[
{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:57Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.057501 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:57Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.064278 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:57Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.072021 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:57Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.078161 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:57Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.085226 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eeae6ad0d860894ab1c556177b18005e8a17808ee153fdce3798afe99fb1492\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88c81cae08e3eca8176795f0b04f2e58b9ba37d3ae27c02a357fbb5a2c07c019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4jmb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:57Z is after 2025-08-24T17:21:41Z" Dec 01 
06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.112640 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.112673 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.112681 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.112693 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.112702 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:57Z","lastTransitionTime":"2025-12-01T06:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.214009 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.214041 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.214050 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.214062 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.214071 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:57Z","lastTransitionTime":"2025-12-01T06:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.316077 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.316114 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.316123 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.316136 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.316147 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:57Z","lastTransitionTime":"2025-12-01T06:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.418187 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.418222 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.418231 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.418245 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.418254 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:57Z","lastTransitionTime":"2025-12-01T06:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.520102 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.520128 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.520138 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.520148 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.520156 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:57Z","lastTransitionTime":"2025-12-01T06:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.621849 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.621877 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.621888 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.621898 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.621905 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:57Z","lastTransitionTime":"2025-12-01T06:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.723826 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.723845 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.723853 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.723863 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.723871 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:57Z","lastTransitionTime":"2025-12-01T06:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.749726 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:43:57 crc kubenswrapper[4632]: E1201 06:43:57.749818 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.825560 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.825596 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.825604 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.825616 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.825627 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:57Z","lastTransitionTime":"2025-12-01T06:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.927079 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.927101 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.927108 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.927117 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.927125 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:57Z","lastTransitionTime":"2025-12-01T06:43:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.929537 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gklnd_ac685f74-ea0b-4a05-8018-a68fc1df20cc/ovnkube-controller/2.log" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.930151 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gklnd_ac685f74-ea0b-4a05-8018-a68fc1df20cc/ovnkube-controller/1.log" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.932278 4632 generic.go:334] "Generic (PLEG): container finished" podID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerID="f45ea64636d1f9306f4f96cafdecc3b9798d2adfd6456a434808601bdb1f3d2b" exitCode=1 Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.932324 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" event={"ID":"ac685f74-ea0b-4a05-8018-a68fc1df20cc","Type":"ContainerDied","Data":"f45ea64636d1f9306f4f96cafdecc3b9798d2adfd6456a434808601bdb1f3d2b"} Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.932482 4632 scope.go:117] "RemoveContainer" containerID="7e387aea0c4554e79d35c792619e3f2ec595730ca940ba84c1897bad221b960c" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.933006 4632 scope.go:117] "RemoveContainer" containerID="f45ea64636d1f9306f4f96cafdecc3b9798d2adfd6456a434808601bdb1f3d2b" Dec 01 06:43:57 crc kubenswrapper[4632]: E1201 06:43:57.933275 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-gklnd_openshift-ovn-kubernetes(ac685f74-ea0b-4a05-8018-a68fc1df20cc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.943808 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:57Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.950811 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:57Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.958371 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eeae6ad0d860894ab1c556177b18005e8a17808ee153fdce3798afe99fb1492\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88c81cae08e3eca8176795f0b04f2e58b9ba37d3ae27c02a357fbb5a2c07c019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4jmb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:57Z is after 2025-08-24T17:21:41Z" Dec 01 
06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.965864 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-12-01T06:43:57Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.974958 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:57Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.984062 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3608e4738e1a29dbcc71944454085dcb6d9db81026a72449941372b7d11c912f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:57Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.990442 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqqbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"559abf1e-dc19-40e9-b75b-9a327d661dc0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqqbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:57Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:57 crc kubenswrapper[4632]: I1201 06:43:57.997312 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:57Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.005508 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:58Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.013674 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:58Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.021293 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:58Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.029206 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.029247 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.029259 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.029274 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.029286 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:58Z","lastTransitionTime":"2025-12-01T06:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.029593 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:58Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.037407 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:58Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.045755 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:58Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.057855 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9592c
086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ea64636d1f9306f4f96cafdecc3b9798d2adfd6456a434808601bdb1f3d2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e387aea0c4554e79d35c792619e3f2ec595730ca940ba84c1897bad221b960c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:43:39Z\\\",\\\"message\\\":\\\"Service_default/kubernetes_TCP_node_switch_crc\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"default/kubernetes\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.1\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{services.Addr{IP:\\\\\\\"192.168.126.11\\\\\\\", Port:6443, Template:(*services.Template)(nil)}}}}, Templates:services.TemplateMap(nil), Switches:[]string{\\\\\\\"crc\\\\\\\"}, Routers:[]string(nil), Groups:[]string(nil)}}\\\\nF1201 06:43:39.481179 6053 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post 
\\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f45ea64636d1f9306f4f96cafdecc3b9798d2adfd6456a434808601bdb1f3d2b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:43:57Z\\\",\\\"message\\\":\\\"otocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager/controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-controller-manager/controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager/controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.149\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1201 06:43:57.345042 6299 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1201 06:43:57.345044 6299 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:58Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.064042 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.
11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:58Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.131514 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.131544 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.131553 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.131565 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.131574 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:58Z","lastTransitionTime":"2025-12-01T06:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.234014 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.234047 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.234056 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.234069 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.234077 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:58Z","lastTransitionTime":"2025-12-01T06:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.335885 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.335913 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.335922 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.335933 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.335943 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:58Z","lastTransitionTime":"2025-12-01T06:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.438084 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.438156 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.438170 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.438183 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.438191 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:58Z","lastTransitionTime":"2025-12-01T06:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.540059 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.540093 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.540102 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.540113 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.540120 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:58Z","lastTransitionTime":"2025-12-01T06:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.642636 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.642665 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.642674 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.642685 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.642695 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:58Z","lastTransitionTime":"2025-12-01T06:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.744764 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.744792 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.744800 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.744809 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.744817 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:58Z","lastTransitionTime":"2025-12-01T06:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.750253 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.750328 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:43:58 crc kubenswrapper[4632]: E1201 06:43:58.750371 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.750400 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:43:58 crc kubenswrapper[4632]: E1201 06:43:58.750469 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:43:58 crc kubenswrapper[4632]: E1201 06:43:58.750560 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.846076 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.846133 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.846143 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.846153 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.846160 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:58Z","lastTransitionTime":"2025-12-01T06:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.936446 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gklnd_ac685f74-ea0b-4a05-8018-a68fc1df20cc/ovnkube-controller/2.log" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.938905 4632 scope.go:117] "RemoveContainer" containerID="f45ea64636d1f9306f4f96cafdecc3b9798d2adfd6456a434808601bdb1f3d2b" Dec 01 06:43:58 crc kubenswrapper[4632]: E1201 06:43:58.939050 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-gklnd_openshift-ovn-kubernetes(ac685f74-ea0b-4a05-8018-a68fc1df20cc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.947223 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqqbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"559abf1e-dc19-40e9-b75b-9a327d661dc0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqqbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:58Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.947683 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.947707 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.947714 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.947727 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.947735 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:58Z","lastTransitionTime":"2025-12-01T06:43:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.954664 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:58Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.964196 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3608e4738e1a29dbcc71944454085dcb6d9db81026a72449941372b7d11c912f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:58Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.972337 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:58Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.980289 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:58Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.987751 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:58Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:58 crc kubenswrapper[4632]: I1201 06:43:58.995045 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:58Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.002641 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:59Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.014524 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ea64636d1f9306f4f96cafdecc3b9798d2adf
d6456a434808601bdb1f3d2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f45ea64636d1f9306f4f96cafdecc3b9798d2adfd6456a434808601bdb1f3d2b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:43:57Z\\\",\\\"message\\\":\\\"otocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager/controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-controller-manager/controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager/controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.149\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1201 06:43:57.345042 6299 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1201 06:43:57.345044 6299 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-gklnd_openshift-ovn-kubernetes(ac685f74-ea0b-4a05-8018-a68fc1df20cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:59Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.022348 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:59Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.028843 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:59Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.036923 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:59Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.043193 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:59Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.050085 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eeae6ad0d860894ab1c556177b18005e8a17808ee153fdce3798afe99fb1492\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88c81cae08e3eca8176795f0b04f2e58b9ba37d3ae27c02a357fbb5a2c07c019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4jmb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:59Z is after 2025-08-24T17:21:41Z" Dec 01 
06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.050395 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.050421 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.050430 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.050444 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.050455 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:59Z","lastTransitionTime":"2025-12-01T06:43:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.058669 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d
7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\"
:\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:59Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.065640 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets
/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:43:59Z is after 2025-08-24T17:21:41Z" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.152635 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.152664 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.152676 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.152691 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.152700 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:59Z","lastTransitionTime":"2025-12-01T06:43:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.254971 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.255024 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.255037 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.255057 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.255070 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:59Z","lastTransitionTime":"2025-12-01T06:43:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.356783 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.356815 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.356823 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.356838 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.356848 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:59Z","lastTransitionTime":"2025-12-01T06:43:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.458742 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.458777 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.458786 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.458798 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.458809 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:59Z","lastTransitionTime":"2025-12-01T06:43:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.560388 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.560426 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.560436 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.560448 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.560458 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:59Z","lastTransitionTime":"2025-12-01T06:43:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.560501 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/559abf1e-dc19-40e9-b75b-9a327d661dc0-metrics-certs\") pod \"network-metrics-daemon-nqqbv\" (UID: \"559abf1e-dc19-40e9-b75b-9a327d661dc0\") " pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:43:59 crc kubenswrapper[4632]: E1201 06:43:59.560605 4632 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 06:43:59 crc kubenswrapper[4632]: E1201 06:43:59.560652 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/559abf1e-dc19-40e9-b75b-9a327d661dc0-metrics-certs podName:559abf1e-dc19-40e9-b75b-9a327d661dc0 nodeName:}" failed. No retries permitted until 2025-12-01 06:44:15.560638838 +0000 UTC m=+65.125651811 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/559abf1e-dc19-40e9-b75b-9a327d661dc0-metrics-certs") pod "network-metrics-daemon-nqqbv" (UID: "559abf1e-dc19-40e9-b75b-9a327d661dc0") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.662612 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.662651 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.662661 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.662677 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.662687 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:59Z","lastTransitionTime":"2025-12-01T06:43:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.749478 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:43:59 crc kubenswrapper[4632]: E1201 06:43:59.749584 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.764078 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.764109 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.764118 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.764130 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.764139 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:59Z","lastTransitionTime":"2025-12-01T06:43:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.866679 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.866707 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.866716 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.866728 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.866736 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:59Z","lastTransitionTime":"2025-12-01T06:43:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.968836 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.968891 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.968900 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.968936 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:43:59 crc kubenswrapper[4632]: I1201 06:43:59.968947 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:43:59Z","lastTransitionTime":"2025-12-01T06:43:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.070800 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.070847 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.070856 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.070869 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.070879 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:00Z","lastTransitionTime":"2025-12-01T06:44:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.172169 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.172196 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.172205 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.172215 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.172223 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:00Z","lastTransitionTime":"2025-12-01T06:44:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.274045 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.274092 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.274104 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.274127 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.274137 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:00Z","lastTransitionTime":"2025-12-01T06:44:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.375703 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.375733 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.375742 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.375753 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.375761 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:00Z","lastTransitionTime":"2025-12-01T06:44:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.477617 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.477674 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.477685 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.477699 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.477708 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:00Z","lastTransitionTime":"2025-12-01T06:44:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.572282 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:44:00 crc kubenswrapper[4632]: E1201 06:44:00.572454 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:44:32.57243393 +0000 UTC m=+82.137446913 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.579323 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.579372 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.579383 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.579394 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.579401 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:00Z","lastTransitionTime":"2025-12-01T06:44:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.673662 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.673712 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.673732 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.673755 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:44:00 crc kubenswrapper[4632]: E1201 06:44:00.673798 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 06:44:00 crc kubenswrapper[4632]: E1201 06:44:00.673822 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 06:44:00 crc kubenswrapper[4632]: E1201 06:44:00.673833 4632 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 06:44:00 crc kubenswrapper[4632]: E1201 06:44:00.673837 4632 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:44:00 crc kubenswrapper[4632]: E1201 06:44:00.673876 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:44:32.673866437 +0000 UTC m=+82.238879400 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 06:44:00 crc kubenswrapper[4632]: E1201 06:44:00.673883 4632 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:44:00 crc kubenswrapper[4632]: E1201 06:44:00.673890 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 06:44:32.673884252 +0000 UTC m=+82.238897225 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:44:00 crc kubenswrapper[4632]: E1201 06:44:00.673933 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 06:44:00 crc kubenswrapper[4632]: E1201 06:44:00.673955 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 06:44:00 crc kubenswrapper[4632]: E1201 06:44:00.673968 4632 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:44:00 crc kubenswrapper[4632]: E1201 06:44:00.673936 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:44:32.673923235 +0000 UTC m=+82.238936207 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:44:00 crc kubenswrapper[4632]: E1201 06:44:00.674028 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 06:44:32.674015869 +0000 UTC m=+82.239028852 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.681341 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.681379 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.681390 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.681404 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.681413 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:00Z","lastTransitionTime":"2025-12-01T06:44:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.749722 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.749747 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:44:00 crc kubenswrapper[4632]: E1201 06:44:00.749811 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.749852 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:44:00 crc kubenswrapper[4632]: E1201 06:44:00.749944 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:44:00 crc kubenswrapper[4632]: E1201 06:44:00.750044 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.761921 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static
-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.770136 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.777948 4632 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.782577 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.782627 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.782637 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.782654 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.782663 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:00Z","lastTransitionTime":"2025-12-01T06:44:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.788183 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3608e4738e1a29dbcc71944454085dcb6d9db81026a72449941372b7d11c912f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.795034 4632 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/network-metrics-daemon-nqqbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"559abf1e-dc19-40e9-b75b-9a327d661dc0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqqbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.806921 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ea64636d1f9306f4f96cafdecc3b9798d2adfd6456a434808601bdb1f3d2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f45ea64636d1f9306f4f96cafdecc3b9798d2adfd6456a434808601bdb1f3d2b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:43:57Z\\\",\\\"message\\\":\\\"otocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager/controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-controller-manager/controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager/controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.149\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1201 06:43:57.345042 6299 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1201 06:43:57.345044 6299 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-gklnd_openshift-ovn-kubernetes(ac685f74-ea0b-4a05-8018-a68fc1df20cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.817420 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.825556 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.832903 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.840203 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.847468 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.855561 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.863537 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.
126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.872808 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.880158 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.885301 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.885347 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.885376 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.885396 4632 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.885409 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:00Z","lastTransitionTime":"2025-12-01T06:44:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.887790 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eeae6ad0d860894ab1c556177b18005e8a17808ee153fdce3798afe99fb1492\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88c81cae08e3eca8176795f0b04f2e58b9ba37d3ae27c02a357fbb5a2c07c019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/
secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4jmb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:00Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.988514 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.988552 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.988563 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.988577 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:00 crc kubenswrapper[4632]: I1201 06:44:00.988592 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:00Z","lastTransitionTime":"2025-12-01T06:44:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.090887 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.091033 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.091093 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.091157 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.091213 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:01Z","lastTransitionTime":"2025-12-01T06:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.193195 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.193224 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.193232 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.193247 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.193255 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:01Z","lastTransitionTime":"2025-12-01T06:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.295235 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.295268 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.295275 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.295290 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.295298 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:01Z","lastTransitionTime":"2025-12-01T06:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.397401 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.397431 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.397439 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.397453 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.397461 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:01Z","lastTransitionTime":"2025-12-01T06:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.498584 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.498689 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.498747 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.498824 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.498886 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:01Z","lastTransitionTime":"2025-12-01T06:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.565409 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.571391 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.575094 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:01Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.583034 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:01Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.590788 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:01Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.598857 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:01Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.600985 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.601040 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.601050 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.601064 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.601072 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:01Z","lastTransitionTime":"2025-12-01T06:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.610952 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ea64636d1f9306f4f96cafdecc3b9798d2adfd6456a434808601bdb1f3d2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f45ea64636d1f9306f4f96cafdecc3b9798d2adfd6456a434808601bdb1f3d2b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:43:57Z\\\",\\\"message\\\":\\\"otocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager/controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-controller-manager/controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager/controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.149\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1201 06:43:57.345042 6299 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1201 06:43:57.345044 6299 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-gklnd_openshift-ovn-kubernetes(ac685f74-ea0b-4a05-8018-a68fc1df20cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:01Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.619370 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:01Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.627498 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:01Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.633905 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:01Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.640824 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:01Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.654582 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eeae6ad0d860894ab1c556177b18005e8a17808ee153fdce3798afe99fb1492\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88c81cae08e3eca8176795f0b04f2e58b9ba37d3ae27c02a357fbb5a2c07c019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4jmb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:01Z is after 2025-08-24T17:21:41Z" Dec 01 
06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.672587 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:01Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.687815 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:01Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.697483 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:01Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.703127 4632 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.703235 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.703301 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.703389 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.703447 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:01Z","lastTransitionTime":"2025-12-01T06:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.705610 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:01Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.715939 4632 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3608e4738e1a29dbcc71944454085dcb6d9db81026a72449941372b7d11c912f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\
\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:01Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.723266 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqqbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"559abf1e-dc19-40e9-b75b-9a327d661dc0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqqbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:01Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.749681 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:44:01 crc kubenswrapper[4632]: E1201 06:44:01.749780 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.805728 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.805759 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.805768 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.805779 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.805789 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:01Z","lastTransitionTime":"2025-12-01T06:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.907602 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.907638 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.907649 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.907664 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:01 crc kubenswrapper[4632]: I1201 06:44:01.907673 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:01Z","lastTransitionTime":"2025-12-01T06:44:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.009331 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.009383 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.009392 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.009402 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.009411 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:02Z","lastTransitionTime":"2025-12-01T06:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.111093 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.111136 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.111145 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.111160 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.111169 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:02Z","lastTransitionTime":"2025-12-01T06:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.213631 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.213698 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.213708 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.213722 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.213731 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:02Z","lastTransitionTime":"2025-12-01T06:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.315128 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.315159 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.315166 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.315177 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.315185 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:02Z","lastTransitionTime":"2025-12-01T06:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.416845 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.416868 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.416876 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.416885 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.416893 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:02Z","lastTransitionTime":"2025-12-01T06:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.518730 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.518758 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.518766 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.518775 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.518783 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:02Z","lastTransitionTime":"2025-12-01T06:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.620646 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.620679 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.620688 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.620699 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.620870 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:02Z","lastTransitionTime":"2025-12-01T06:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.723186 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.723216 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.723225 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.723236 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.723244 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:02Z","lastTransitionTime":"2025-12-01T06:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.750037 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.750036 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.750101 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:44:02 crc kubenswrapper[4632]: E1201 06:44:02.750179 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:44:02 crc kubenswrapper[4632]: E1201 06:44:02.750302 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:44:02 crc kubenswrapper[4632]: E1201 06:44:02.750511 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.824884 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.824957 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.824966 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.824983 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.825003 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:02Z","lastTransitionTime":"2025-12-01T06:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.898549 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.898580 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.898589 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.898600 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.898608 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:02Z","lastTransitionTime":"2025-12-01T06:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:02 crc kubenswrapper[4632]: E1201 06:44:02.906927 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:02Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.909492 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.909539 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.909550 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.909567 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.909577 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:02Z","lastTransitionTime":"2025-12-01T06:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:02 crc kubenswrapper[4632]: E1201 06:44:02.918230 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:02Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:02Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:02Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:02Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.920413 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.920446 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure"
Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.920457 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.920473 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.920485 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:02Z","lastTransitionTime":"2025-12-01T06:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:44:02 crc kubenswrapper[4632]: E1201 06:44:02.928396 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:02Z is after 2025-08-24T17:21:41Z"
Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.930583 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.930639 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
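Every retry above fails the same way: the kubelet's status PATCH is intercepted by the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743, whose serving certificate expired on 2025-08-24T17:21:41Z. A minimal Go sketch for confirming that from the node itself; the address comes from the log, everything else (the file name, running it where the port is reachable) is assumed:

    // certcheck.go - minimal sketch: dial the webhook endpoint from the log
    // above and print its serving certificate's validity window. Assumes it
    // runs on the node where 127.0.0.1:9743 is listening.
    package main

    import (
        "crypto/tls"
        "fmt"
        "log"
        "time"
    )

    func main() {
        // InsecureSkipVerify lets us fetch the certificate even though
        // verification would fail (that failure is the point of this check).
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
        if err != nil {
            log.Fatalf("dial: %v", err)
        }
        defer conn.Close()

        now := time.Now()
        for _, cert := range conn.ConnectionState().PeerCertificates {
            fmt.Printf("subject=%s notBefore=%s notAfter=%s expired=%t\n",
                cert.Subject, cert.NotBefore.Format(time.RFC3339),
                cert.NotAfter.Format(time.RFC3339), now.After(cert.NotAfter))
        }
    }

If notAfter is in the past (here it should print the 2025-08-24T17:21:41Z seen in the log), the webhook's serving certificate needs rotation; the kubelet's refusal is correct TLS behavior, not the fault.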
Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.930649 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.930661 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.930670 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:02Z","lastTransitionTime":"2025-12-01T06:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:44:02 crc kubenswrapper[4632]: E1201 06:44:02.938095 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:02Z is after 2025-08-24T17:21:41Z"
Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.940339 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.940405 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.940417 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.940430 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.940438 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:02Z","lastTransitionTime":"2025-12-01T06:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:44:02 crc kubenswrapper[4632]: E1201 06:44:02.948668 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:02Z is after 2025-08-24T17:21:41Z"
Dec 01 06:44:02 crc kubenswrapper[4632]: E1201 06:44:02.948779 4632 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.949552 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
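The "exceeds retry count" line marks the end of one status-update cycle: the kubelet attempts the PATCH a fixed number of times per sync (the nodeStatusUpdateRetry constant, 5 in the upstream kubelet) and then gives up until the next sync period. An illustrative sketch of that loop with stand-in names; it is not the kubelet's actual code:

    // retrysketch.go - illustrative retry loop behind the log lines above;
    // names and structure are a sketch, not the kubelet implementation.
    package main

    import (
        "errors"
        "fmt"
    )

    const nodeStatusUpdateRetry = 5 // matches the upstream kubelet constant

    // Stand-in for the status PATCH that fails in the log: every attempt
    // hits the expired-certificate webhook, so every attempt errors.
    func tryUpdateNodeStatus(attempt int) error {
        return errors.New("failed calling webhook: x509: certificate has expired")
    }

    func updateNodeStatus() error {
        for i := 0; i < nodeStatusUpdateRetry; i++ {
            if err := tryUpdateNodeStatus(i); err != nil {
                fmt.Printf("Error updating node status, will retry: %v\n", err)
                continue
            }
            return nil
        }
        return errors.New("update node status exceeds retry count")
    }

    func main() {
        if err := updateNodeStatus(); err != nil {
            fmt.Println("Unable to update node status:", err)
        }
    }

That cadence is why the same error repeats every few milliseconds above and then pauses: each sync burns through its retry budget against the same expired certificate.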
Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.949580 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.949590 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.949604 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:44:02 crc kubenswrapper[4632]: I1201 06:44:02.949612 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:02Z","lastTransitionTime":"2025-12-01T06:44:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:44:03 crc kubenswrapper[4632]: I1201 06:44:03.051265 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:44:03 crc kubenswrapper[4632]: I1201 06:44:03.051292 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:44:03 crc kubenswrapper[4632]: I1201 06:44:03.051301 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:44:03 crc kubenswrapper[4632]: I1201 06:44:03.051313 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:44:03 crc kubenswrapper[4632]: I1201 06:44:03.051322 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:03Z","lastTransitionTime":"2025-12-01T06:44:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:44:03 crc kubenswrapper[4632]: I1201 06:44:03.750107 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv"
Dec 01 06:44:03 crc kubenswrapper[4632]: E1201 06:44:03.750210 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0"
Has your network provider started?"} Dec 01 06:44:03 crc kubenswrapper[4632]: I1201 06:44:03.970084 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:03 crc kubenswrapper[4632]: I1201 06:44:03.970114 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:03 crc kubenswrapper[4632]: I1201 06:44:03.970122 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:03 crc kubenswrapper[4632]: I1201 06:44:03.970133 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:03 crc kubenswrapper[4632]: I1201 06:44:03.970140 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:03Z","lastTransitionTime":"2025-12-01T06:44:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.071931 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.071979 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.071988 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.072019 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.072032 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:04Z","lastTransitionTime":"2025-12-01T06:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.174867 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.174918 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.174929 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.174946 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.174961 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:04Z","lastTransitionTime":"2025-12-01T06:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.276976 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.277023 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.277033 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.277049 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.277057 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:04Z","lastTransitionTime":"2025-12-01T06:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.378892 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.378926 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.378935 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.378946 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.378954 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:04Z","lastTransitionTime":"2025-12-01T06:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.480830 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.480883 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.480893 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.480912 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.480922 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:04Z","lastTransitionTime":"2025-12-01T06:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.582835 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.582874 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.582883 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.582912 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.582922 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:04Z","lastTransitionTime":"2025-12-01T06:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.685346 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.685400 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.685409 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.685425 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.685435 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:04Z","lastTransitionTime":"2025-12-01T06:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.749538 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.749546 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:44:04 crc kubenswrapper[4632]: E1201 06:44:04.749687 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.749562 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:44:04 crc kubenswrapper[4632]: E1201 06:44:04.749817 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:44:04 crc kubenswrapper[4632]: E1201 06:44:04.749898 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.787706 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.787751 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.787761 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.787777 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.787786 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:04Z","lastTransitionTime":"2025-12-01T06:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.890166 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.890212 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.890224 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.890241 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.890259 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:04Z","lastTransitionTime":"2025-12-01T06:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.992344 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.992394 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.992405 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.992419 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:04 crc kubenswrapper[4632]: I1201 06:44:04.992428 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:04Z","lastTransitionTime":"2025-12-01T06:44:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.094453 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.094483 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.094491 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.094504 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.094512 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:05Z","lastTransitionTime":"2025-12-01T06:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.196037 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.196066 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.196074 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.196105 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.196113 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:05Z","lastTransitionTime":"2025-12-01T06:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.297433 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.297469 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.297477 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.297490 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.297498 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:05Z","lastTransitionTime":"2025-12-01T06:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.399885 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.399938 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.399947 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.399960 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.399970 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:05Z","lastTransitionTime":"2025-12-01T06:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.501607 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.501643 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.501651 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.501663 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.501671 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:05Z","lastTransitionTime":"2025-12-01T06:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.603756 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.603792 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.603804 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.603816 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.603827 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:05Z","lastTransitionTime":"2025-12-01T06:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.706013 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.706046 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.706054 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.706065 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.706072 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:05Z","lastTransitionTime":"2025-12-01T06:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.749723 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:44:05 crc kubenswrapper[4632]: E1201 06:44:05.749832 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.807343 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.807383 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.807392 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.807403 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.807410 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:05Z","lastTransitionTime":"2025-12-01T06:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.909399 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.909437 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.909447 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.909459 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:05 crc kubenswrapper[4632]: I1201 06:44:05.909469 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:05Z","lastTransitionTime":"2025-12-01T06:44:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.011432 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.011461 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.011470 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.011483 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.011491 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:06Z","lastTransitionTime":"2025-12-01T06:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.113244 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.113279 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.113289 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.113302 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.113310 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:06Z","lastTransitionTime":"2025-12-01T06:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.215503 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.215534 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.215544 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.215556 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.215566 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:06Z","lastTransitionTime":"2025-12-01T06:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.316880 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.316922 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.316933 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.316947 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.316958 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:06Z","lastTransitionTime":"2025-12-01T06:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.418667 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.418708 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.418720 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.418735 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.418745 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:06Z","lastTransitionTime":"2025-12-01T06:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.520483 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.520514 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.520523 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.520534 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.520542 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:06Z","lastTransitionTime":"2025-12-01T06:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.622552 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.622599 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.622607 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.622619 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.622627 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:06Z","lastTransitionTime":"2025-12-01T06:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.724806 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.724843 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.724852 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.724864 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.724872 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:06Z","lastTransitionTime":"2025-12-01T06:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.749287 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.749295 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:44:06 crc kubenswrapper[4632]: E1201 06:44:06.749405 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.749437 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:44:06 crc kubenswrapper[4632]: E1201 06:44:06.749623 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:44:06 crc kubenswrapper[4632]: E1201 06:44:06.749532 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.826590 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.826808 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.826873 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.826930 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.826980 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:06Z","lastTransitionTime":"2025-12-01T06:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.928654 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.928845 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.928903 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.928985 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:06 crc kubenswrapper[4632]: I1201 06:44:06.929055 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:06Z","lastTransitionTime":"2025-12-01T06:44:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.030754 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.030783 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.030792 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.030813 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.030820 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:07Z","lastTransitionTime":"2025-12-01T06:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.133207 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.133240 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.133248 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.133261 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.133270 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:07Z","lastTransitionTime":"2025-12-01T06:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.235282 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.235314 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.235323 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.235335 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.235343 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:07Z","lastTransitionTime":"2025-12-01T06:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.337333 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.337393 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.337403 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.337416 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.337424 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:07Z","lastTransitionTime":"2025-12-01T06:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.438879 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.438913 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.438931 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.438945 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.438953 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:07Z","lastTransitionTime":"2025-12-01T06:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.540757 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.540800 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.540810 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.540825 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.540833 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:07Z","lastTransitionTime":"2025-12-01T06:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.642689 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.642731 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.642741 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.642754 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.642761 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:07Z","lastTransitionTime":"2025-12-01T06:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.744977 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.745027 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.745036 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.745048 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.745056 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:07Z","lastTransitionTime":"2025-12-01T06:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.749175 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:44:07 crc kubenswrapper[4632]: E1201 06:44:07.749279 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.846799 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.846833 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.846841 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.846852 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.846859 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:07Z","lastTransitionTime":"2025-12-01T06:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.949046 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.949090 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.949101 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.949115 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:07 crc kubenswrapper[4632]: I1201 06:44:07.949126 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:07Z","lastTransitionTime":"2025-12-01T06:44:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.050604 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.050640 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.050667 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.050679 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.050688 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:08Z","lastTransitionTime":"2025-12-01T06:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.152281 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.152324 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.152333 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.152348 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.152372 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:08Z","lastTransitionTime":"2025-12-01T06:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.254638 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.254682 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.254691 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.254703 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.254711 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:08Z","lastTransitionTime":"2025-12-01T06:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.356544 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.356586 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.356616 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.356632 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.356641 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:08Z","lastTransitionTime":"2025-12-01T06:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.458579 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.458616 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.458624 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.458636 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.458644 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:08Z","lastTransitionTime":"2025-12-01T06:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.560374 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.560411 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.560421 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.560444 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.560457 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:08Z","lastTransitionTime":"2025-12-01T06:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.662684 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.662718 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.662726 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.662738 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.662746 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:08Z","lastTransitionTime":"2025-12-01T06:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.750087 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.750123 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:44:08 crc kubenswrapper[4632]: E1201 06:44:08.750203 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.750247 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:44:08 crc kubenswrapper[4632]: E1201 06:44:08.750319 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:44:08 crc kubenswrapper[4632]: E1201 06:44:08.750445 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
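The NotReady condition and the pod sync errors above share one cause: the kubelet reports NetworkReady=false until a CNI network configuration appears in /etc/kubernetes/cni/net.d/. A minimal, hypothetical Go sketch of that directory check follows (libcni conventionally accepts .conf, .conflist, and .json files; this is an illustration, not the kubelet's own code):

package main

import (
	"fmt"
	"path/filepath"
)

// cniConfigPresent reports whether any CNI network configuration
// exists in confDir, mirroring the condition behind the log message
// "no CNI configuration file in /etc/kubernetes/cni/net.d/".
func cniConfigPresent(confDir string) (bool, []string) {
	var found []string
	// Extensions conventionally recognized by libcni; an assumption here.
	for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
		matches, _ := filepath.Glob(filepath.Join(confDir, pat))
		found = append(found, matches...)
	}
	return len(found) > 0, found
}

func main() {
	ok, files := cniConfigPresent("/etc/kubernetes/cni/net.d")
	if !ok {
		fmt.Println("no CNI configuration files; node will stay NotReady")
		return
	}
	fmt.Println("CNI configs present:", files)
}

Once the network operator writes a config into that directory, the runtime reports NetworkReady=true and these sync errors stop.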
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.764666 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.764711 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.764721 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.764732 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.764740 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:08Z","lastTransitionTime":"2025-12-01T06:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.866760 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.866802 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.866812 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.866826 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.866837 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:08Z","lastTransitionTime":"2025-12-01T06:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.968867 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.968897 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.968906 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.968917 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:08 crc kubenswrapper[4632]: I1201 06:44:08.968925 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:08Z","lastTransitionTime":"2025-12-01T06:44:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.070893 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.070918 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.070926 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.070939 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.070946 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:09Z","lastTransitionTime":"2025-12-01T06:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.173054 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.173111 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.173124 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.173137 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.173146 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:09Z","lastTransitionTime":"2025-12-01T06:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.274905 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.274936 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.274944 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.274958 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.274967 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:09Z","lastTransitionTime":"2025-12-01T06:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.376695 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.376729 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.376738 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.376750 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.376758 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:09Z","lastTransitionTime":"2025-12-01T06:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.479048 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.479101 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.479111 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.479124 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.479133 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:09Z","lastTransitionTime":"2025-12-01T06:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.580557 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.580593 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.580602 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.580623 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.580633 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:09Z","lastTransitionTime":"2025-12-01T06:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.682560 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.682584 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.682592 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.682604 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.682613 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:09Z","lastTransitionTime":"2025-12-01T06:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.749529 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:44:09 crc kubenswrapper[4632]: E1201 06:44:09.749632 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
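A second, independent failure surfaces just after this point: every pod status patch is rejected because the node-identity webhook's serving certificate expired (notAfter 2025-08-24, current time 2025-12-01, per the errors below). A small hypothetical probe, assuming the webhook endpoint 127.0.0.1:9743 taken from the log, that fetches the certificate and reports its validity window:

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Skip verification so we can inspect the certificate even though
	// it is expired; this is a diagnostic probe, not a trust decision.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial failed:", err)
		return
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("subject:   %s\n", cert.Subject)
	fmt.Printf("notBefore: %s\n", cert.NotBefore)
	fmt.Printf("notAfter:  %s\n", cert.NotAfter)
	if time.Now().After(cert.NotAfter) {
		fmt.Println("certificate is expired; webhook posts will fail x509 verification")
	}
}

This matches the repeated "tls: failed to verify certificate: x509: certificate has expired or is not yet valid" errors in the status_manager entries that follow.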
pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.784250 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.784297 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.784306 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.784317 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.784326 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:09Z","lastTransitionTime":"2025-12-01T06:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.886156 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.886196 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.886204 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.886218 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.886227 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:09Z","lastTransitionTime":"2025-12-01T06:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.987461 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.987494 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.987504 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.987515 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:09 crc kubenswrapper[4632]: I1201 06:44:09.987526 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:09Z","lastTransitionTime":"2025-12-01T06:44:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.089301 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.089326 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.089334 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.089344 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.089369 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:10Z","lastTransitionTime":"2025-12-01T06:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.190314 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.190369 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.190379 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.190391 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.190400 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:10Z","lastTransitionTime":"2025-12-01T06:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.291991 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.292024 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.292032 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.292043 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.292049 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:10Z","lastTransitionTime":"2025-12-01T06:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.394101 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.394135 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.394144 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.394157 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.394167 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:10Z","lastTransitionTime":"2025-12-01T06:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.496228 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.496612 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.496670 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.496726 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.496774 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:10Z","lastTransitionTime":"2025-12-01T06:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.598940 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.598973 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.598985 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.599005 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.599026 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:10Z","lastTransitionTime":"2025-12-01T06:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.700879 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.700931 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.700941 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.700954 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.700962 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:10Z","lastTransitionTime":"2025-12-01T06:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.749296 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:44:10 crc kubenswrapper[4632]: E1201 06:44:10.749408 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.749455 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.749486 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:44:10 crc kubenswrapper[4632]: E1201 06:44:10.749508 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:44:10 crc kubenswrapper[4632]: E1201 06:44:10.749574 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.758512 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e82ce7b-2c24-4aa4-828b-5dc8a606ea6a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://872dd39ba608722c008353bfe8e84990163f105ea8fca770893d2b35c63d51d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6c8a78975e59379ff4a52113b032b96ed7c36cd5005ec56b352ed5c700f2df7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a708cf163fc7fd531bc9170e4a5404a22536fffe4ae2755d6b3d4c12ba7ad72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7314d923bb8f2b21d40955c1aa5a2aa630a6916d05b64e4919394018db6bd4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7314d923bb8f2b21d40955c1aa5a2aa630a6916d05b64e4919394018db6bd4b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.767989 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.774518 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.781965 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eeae6ad0d860894ab1c556177b18005e8a17808ee153fdce3798afe99fb1492\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88c81cae08e3eca8176795f0b04f2e58b9ba37d3ae27c02a357fbb5a2c07c019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4jmb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:10Z is after 2025-08-24T17:21:41Z" Dec 01 
06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.790235 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.798013 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.802441 4632 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.802473 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.802483 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.802497 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.802506 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:10Z","lastTransitionTime":"2025-12-01T06:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.805675 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.819596 4632 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3608e4738e1a29dbcc71944454085dcb6d9db81026a72449941372b7d11c912f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\
\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.826086 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqqbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"559abf1e-dc19-40e9-b75b-9a327d661dc0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqqbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.837772 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ea64636d1f9306f4f96cafdecc3b9798d2adfd6456a434808601bdb1f3d2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f45ea64636d1f9306f4f96cafdecc3b9798d2adfd6456a434808601bdb1f3d2b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:43:57Z\\\",\\\"message\\\":\\\"otocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager/controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-controller-manager/controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager/controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.149\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1201 06:43:57.345042 6299 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1201 06:43:57.345044 6299 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-gklnd_openshift-ovn-kubernetes(ac685f74-ea0b-4a05-8018-a68fc1df20cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.845419 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.853238 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.860839 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.868396 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.875693 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.883282 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.889269 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.
126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:10Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.904511 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.904542 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.904553 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.904564 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:10 crc kubenswrapper[4632]: I1201 06:44:10.904573 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:10Z","lastTransitionTime":"2025-12-01T06:44:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.006007 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.006042 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.006051 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.006063 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.006070 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:11Z","lastTransitionTime":"2025-12-01T06:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.108066 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.108099 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.108107 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.108119 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.108128 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:11Z","lastTransitionTime":"2025-12-01T06:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.210250 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.210282 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.210291 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.210302 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.210310 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:11Z","lastTransitionTime":"2025-12-01T06:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.311839 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.312093 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.312102 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.312115 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.312142 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:11Z","lastTransitionTime":"2025-12-01T06:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.414643 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.414678 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.414686 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.414698 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.414706 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:11Z","lastTransitionTime":"2025-12-01T06:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.516641 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.516668 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.516677 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.516689 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.516698 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:11Z","lastTransitionTime":"2025-12-01T06:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.618161 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.618189 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.618198 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.618209 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.618230 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:11Z","lastTransitionTime":"2025-12-01T06:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.719795 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.719825 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.719833 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.719845 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.719853 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:11Z","lastTransitionTime":"2025-12-01T06:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.749457 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:44:11 crc kubenswrapper[4632]: E1201 06:44:11.749572 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.821569 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.821597 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.821607 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.821617 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.821626 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:11Z","lastTransitionTime":"2025-12-01T06:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.922987 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.923037 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.923046 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.923058 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:11 crc kubenswrapper[4632]: I1201 06:44:11.923067 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:11Z","lastTransitionTime":"2025-12-01T06:44:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.025247 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.025285 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.025330 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.025344 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.025373 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:12Z","lastTransitionTime":"2025-12-01T06:44:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.126941 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.126979 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.126988 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.127036 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.127047 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:12Z","lastTransitionTime":"2025-12-01T06:44:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.228615 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.228644 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.228653 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.228664 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.228672 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:12Z","lastTransitionTime":"2025-12-01T06:44:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.330719 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.330771 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.330780 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.330793 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.330802 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:12Z","lastTransitionTime":"2025-12-01T06:44:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.432636 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.432674 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.432682 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.432694 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.432702 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:12Z","lastTransitionTime":"2025-12-01T06:44:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.534839 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.534890 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.534899 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.534911 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.534919 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:12Z","lastTransitionTime":"2025-12-01T06:44:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.636934 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.636970 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.636978 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.636991 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.637012 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:12Z","lastTransitionTime":"2025-12-01T06:44:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.739036 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.739075 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.739082 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.739095 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.739104 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:12Z","lastTransitionTime":"2025-12-01T06:44:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.749460 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.749507 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:44:12 crc kubenswrapper[4632]: E1201 06:44:12.749553 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.749512 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:44:12 crc kubenswrapper[4632]: E1201 06:44:12.749610 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:44:12 crc kubenswrapper[4632]: E1201 06:44:12.749677 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.840841 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.840876 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.840886 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.840909 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.840918 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:12Z","lastTransitionTime":"2025-12-01T06:44:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.942691 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.942737 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.942745 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.942757 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:12 crc kubenswrapper[4632]: I1201 06:44:12.942765 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:12Z","lastTransitionTime":"2025-12-01T06:44:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.044163 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.044203 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.044212 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.044224 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.044233 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:13Z","lastTransitionTime":"2025-12-01T06:44:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.058029 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.058067 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.058074 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.058086 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.058094 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:13Z","lastTransitionTime":"2025-12-01T06:44:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:13 crc kubenswrapper[4632]: E1201 06:44:13.066974 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.069121 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.069145 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.069153 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.069163 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.069170 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:13Z","lastTransitionTime":"2025-12-01T06:44:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:13 crc kubenswrapper[4632]: E1201 06:44:13.077401 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.079849 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.079882 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.079891 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.079904 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.079913 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:13Z","lastTransitionTime":"2025-12-01T06:44:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:13 crc kubenswrapper[4632]: E1201 06:44:13.092013 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.094821 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.094876 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.094886 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.094908 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.094932 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:13Z","lastTransitionTime":"2025-12-01T06:44:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:13 crc kubenswrapper[4632]: E1201 06:44:13.103386 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[ ... image list identical to the previous patch attempt elided ... ],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.106048 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.106081 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.106091 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.106102 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.106110 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:13Z","lastTransitionTime":"2025-12-01T06:44:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:13 crc kubenswrapper[4632]: E1201 06:44:13.114292 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{ ... status payload identical to the two previous attempts elided ... }\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:13Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:13 crc kubenswrapper[4632]: E1201 06:44:13.114407 4632 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
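
[Editor's note] The repeated patch attempts above all fail the same way: the API server cannot call the node.network-node-identity.openshift.io webhook because the webhook's serving certificate expired on 2025-08-24, and after a fixed number of attempts the kubelet gives up with "update node status exceeds retry count" (the retry constant, nodeStatusUpdateRetry, is 5 in upstream kubelet sources). For reference, a minimal, self-contained Go sketch of the validity-window check that produces the "x509: certificate has expired or is not yet valid" error; the certificate path is a placeholder, not the webhook's real certificate location:

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	// Placeholder path: point this at the PEM certificate to check.
	data, err := os.ReadFile("/tmp/webhook-serving.crt")
	if err != nil {
		panic(err)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		panic("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		panic(err)
	}
	now := time.Now().UTC()
	// The same validity-window comparison that yields
	// "x509: certificate has expired or is not yet valid".
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		fmt.Printf("invalid: current time %s is outside [%s, %s]\n",
			now.Format(time.RFC3339),
			cert.NotBefore.UTC().Format(time.RFC3339),
			cert.NotAfter.UTC().Format(time.RFC3339))
		return
	}
	fmt.Printf("valid until %s\n", cert.NotAfter.UTC().Format(time.RFC3339))
}
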
event="NodeHasSufficientMemory" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.146376 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.146386 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.146404 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.146412 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:13Z","lastTransitionTime":"2025-12-01T06:44:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.248216 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.248260 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.248272 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.248286 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.248296 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:13Z","lastTransitionTime":"2025-12-01T06:44:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.349812 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.349837 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.349844 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.349854 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.349862 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:13Z","lastTransitionTime":"2025-12-01T06:44:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.451805 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.451830 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.451838 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.451868 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.451878 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:13Z","lastTransitionTime":"2025-12-01T06:44:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.553430 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.553461 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.553470 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.553482 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.553490 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:13Z","lastTransitionTime":"2025-12-01T06:44:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.655451 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.655484 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.655494 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.655507 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.655515 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:13Z","lastTransitionTime":"2025-12-01T06:44:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.749908 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:44:13 crc kubenswrapper[4632]: E1201 06:44:13.750029 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.750443 4632 scope.go:117] "RemoveContainer" containerID="f45ea64636d1f9306f4f96cafdecc3b9798d2adfd6456a434808601bdb1f3d2b" Dec 01 06:44:13 crc kubenswrapper[4632]: E1201 06:44:13.750558 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-gklnd_openshift-ovn-kubernetes(ac685f74-ea0b-4a05-8018-a68fc1df20cc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.757348 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.757388 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.757397 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.757406 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.757414 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:13Z","lastTransitionTime":"2025-12-01T06:44:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.859048 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.859078 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.859086 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.859097 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.859106 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:13Z","lastTransitionTime":"2025-12-01T06:44:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.960842 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.960879 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.960890 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.960902 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:13 crc kubenswrapper[4632]: I1201 06:44:13.960913 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:13Z","lastTransitionTime":"2025-12-01T06:44:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.063068 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.063101 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.063112 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.063124 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.063169 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:14Z","lastTransitionTime":"2025-12-01T06:44:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.164749 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.164782 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.164790 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.164804 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.164812 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:14Z","lastTransitionTime":"2025-12-01T06:44:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.266035 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.266064 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.266072 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.266084 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.266091 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:14Z","lastTransitionTime":"2025-12-01T06:44:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.367467 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.367492 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.367501 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.367510 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.367517 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:14Z","lastTransitionTime":"2025-12-01T06:44:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.468772 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.468794 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.468802 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.468810 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.468817 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:14Z","lastTransitionTime":"2025-12-01T06:44:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.569931 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.569956 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.569963 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.569972 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.569979 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:14Z","lastTransitionTime":"2025-12-01T06:44:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.671632 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.671671 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.671683 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.671697 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.671706 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:14Z","lastTransitionTime":"2025-12-01T06:44:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.749396 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.749425 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:44:14 crc kubenswrapper[4632]: E1201 06:44:14.749472 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:44:14 crc kubenswrapper[4632]: E1201 06:44:14.749538 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.749574 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:44:14 crc kubenswrapper[4632]: E1201 06:44:14.749650 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.772878 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.772900 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.772908 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.772916 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.772923 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:14Z","lastTransitionTime":"2025-12-01T06:44:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.874137 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.874165 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.874173 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.874183 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.874191 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:14Z","lastTransitionTime":"2025-12-01T06:44:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.975565 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.975593 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.975602 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.975611 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:14 crc kubenswrapper[4632]: I1201 06:44:14.975619 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:14Z","lastTransitionTime":"2025-12-01T06:44:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.077043 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.077088 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.077103 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.077112 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.077119 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:15Z","lastTransitionTime":"2025-12-01T06:44:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.178595 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.178624 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.178634 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.178646 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.178654 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:15Z","lastTransitionTime":"2025-12-01T06:44:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.279555 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.279576 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.279585 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.279594 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.279601 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:15Z","lastTransitionTime":"2025-12-01T06:44:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.380641 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.380665 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.380672 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.380682 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.380690 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:15Z","lastTransitionTime":"2025-12-01T06:44:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.482084 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.482106 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.482115 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.482124 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.482131 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:15Z","lastTransitionTime":"2025-12-01T06:44:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.583490 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.583845 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.583914 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.583993 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.584089 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:15Z","lastTransitionTime":"2025-12-01T06:44:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.594775 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/559abf1e-dc19-40e9-b75b-9a327d661dc0-metrics-certs\") pod \"network-metrics-daemon-nqqbv\" (UID: \"559abf1e-dc19-40e9-b75b-9a327d661dc0\") " pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:44:15 crc kubenswrapper[4632]: E1201 06:44:15.594862 4632 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 06:44:15 crc kubenswrapper[4632]: E1201 06:44:15.594899 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/559abf1e-dc19-40e9-b75b-9a327d661dc0-metrics-certs podName:559abf1e-dc19-40e9-b75b-9a327d661dc0 nodeName:}" failed. No retries permitted until 2025-12-01 06:44:47.594887977 +0000 UTC m=+97.159900950 (durationBeforeRetry 32s). 
Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.685558 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.685582 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.685590 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.685599 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.685606 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:15Z","lastTransitionTime":"2025-12-01T06:44:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.750189 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv"
Dec 01 06:44:15 crc kubenswrapper[4632]: E1201 06:44:15.750279 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0"
Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.787062 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.787111 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.787121 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.787130 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.787139 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:15Z","lastTransitionTime":"2025-12-01T06:44:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.888953 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.889276 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.889419 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.889504 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.889563 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:15Z","lastTransitionTime":"2025-12-01T06:44:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.991443 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.991471 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.991479 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.991488 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:15 crc kubenswrapper[4632]: I1201 06:44:15.991495 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:15Z","lastTransitionTime":"2025-12-01T06:44:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.093523 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.093551 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.093559 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.093568 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.093576 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:16Z","lastTransitionTime":"2025-12-01T06:44:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.195373 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.195395 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.195404 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.195413 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.195420 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:16Z","lastTransitionTime":"2025-12-01T06:44:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.296826 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.296847 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.296854 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.296863 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.296870 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:16Z","lastTransitionTime":"2025-12-01T06:44:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.397904 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.397930 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.397938 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.397949 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.397956 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:16Z","lastTransitionTime":"2025-12-01T06:44:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.499334 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.499387 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.499400 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.499411 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.499421 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:16Z","lastTransitionTime":"2025-12-01T06:44:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.600731 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.600756 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.600765 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.600774 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.600781 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:16Z","lastTransitionTime":"2025-12-01T06:44:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.702234 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.702290 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.702300 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.702309 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.702317 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:16Z","lastTransitionTime":"2025-12-01T06:44:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.749465 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.749499 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:44:16 crc kubenswrapper[4632]: E1201 06:44:16.749543 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.749474 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:44:16 crc kubenswrapper[4632]: E1201 06:44:16.749636 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:44:16 crc kubenswrapper[4632]: E1201 06:44:16.749671 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.803910 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.803938 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.803946 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.803956 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.803963 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:16Z","lastTransitionTime":"2025-12-01T06:44:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.905243 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.905272 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.905280 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.905290 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:16 crc kubenswrapper[4632]: I1201 06:44:16.905298 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:16Z","lastTransitionTime":"2025-12-01T06:44:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.006731 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.006758 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.006766 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.006775 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.006782 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:17Z","lastTransitionTime":"2025-12-01T06:44:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.108465 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.108511 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.108521 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.108535 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.108544 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:17Z","lastTransitionTime":"2025-12-01T06:44:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.210315 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.210340 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.210348 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.210401 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.210411 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:17Z","lastTransitionTime":"2025-12-01T06:44:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.312443 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.312486 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.312497 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.312511 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.312522 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:17Z","lastTransitionTime":"2025-12-01T06:44:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.413992 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.414046 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.414056 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.414070 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.414078 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:17Z","lastTransitionTime":"2025-12-01T06:44:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.516135 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.516169 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.516177 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.516191 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.516198 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:17Z","lastTransitionTime":"2025-12-01T06:44:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.617888 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.617922 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.617930 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.617959 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.617969 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:17Z","lastTransitionTime":"2025-12-01T06:44:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.719945 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.719971 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.719980 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.719991 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.720007 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:17Z","lastTransitionTime":"2025-12-01T06:44:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.749584 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:44:17 crc kubenswrapper[4632]: E1201 06:44:17.749695 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.821598 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.821632 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.821642 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.821654 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.821663 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:17Z","lastTransitionTime":"2025-12-01T06:44:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.923552 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.923589 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.923599 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.923611 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.923619 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:17Z","lastTransitionTime":"2025-12-01T06:44:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.978282 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zpkn8_45a865b5-e289-4d8f-93d3-007d46f49be9/kube-multus/0.log" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.978337 4632 generic.go:334] "Generic (PLEG): container finished" podID="45a865b5-e289-4d8f-93d3-007d46f49be9" containerID="40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8" exitCode=1 Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.978377 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zpkn8" event={"ID":"45a865b5-e289-4d8f-93d3-007d46f49be9","Type":"ContainerDied","Data":"40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8"} Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.978733 4632 scope.go:117] "RemoveContainer" containerID="40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.988695 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":
{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"1
92.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:17Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:17 crc kubenswrapper[4632]: I1201 06:44:17.995709 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"h
ostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:17Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.003049 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.011706 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3608e4738e1a29dbcc71944454085dcb6d9db81026a72449941372b7d11c912f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.018178 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqqbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"559abf1e-dc19-40e9-b75b-9a327d661dc0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqqbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.025097 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.025131 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.025141 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.025155 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.025169 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:18Z","lastTransitionTime":"2025-12-01T06:44:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.027306 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.035929 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:44:17Z\\\",\\\"message\\\":\\\"2025-12-01T06:43:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_cebe59f2-1c92-4992-b1a6-1097444c902f\\\\n2025-12-01T06:43:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_cebe59f2-1c92-4992-b1a6-1097444c902f to /host/opt/cni/bin/\\\\n2025-12-01T06:43:32Z [verbose] multus-daemon started\\\\n2025-12-01T06:43:32Z [verbose] Readiness Indicator file check\\\\n2025-12-01T06:44:17Z [error] have you checked that your 
default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.049528 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ea64636d1f9306f4f96cafdecc3b9798d2adfd6456a434808601bdb1f3d2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f45ea64636d1f9306f4f96cafdecc3b9798d2adfd6456a434808601bdb1f3d2b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:43:57Z\\\",\\\"message\\\":\\\"otocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager/controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-controller-manager/controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager/controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.149\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1201 06:43:57.345042 6299 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1201 06:43:57.345044 6299 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-gklnd_openshift-ovn-kubernetes(ac685f74-ea0b-4a05-8018-a68fc1df20cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.058289 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.066645 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.074578 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.081916 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.088366 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.095490 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e82ce7b-2c24-4aa4-828b-5dc8a606ea6a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://872dd39ba608722c008353bfe8e84990163f105ea8fca770893d2b35c63d51d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6c8a78975e59379ff4a52113b032b96ed7c36cd5005ec56b352ed5c700f2df7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a708cf163fc7fd531bc9170e4a5404a22536fffe4ae2755d6b3d4c12ba7ad72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7314d923bb8f2b21d40955c1aa5a2aa630a6916d05b64e4919394018db6bd4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7314d923bb8f2b21d40955c1aa5a2aa630a6916d05b64e4919394018db6bd4b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.103797 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.110076 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.116756 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eeae6ad0d860894ab1c556177b18005e8a17808ee153fdce3798afe99fb1492\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88c81cae08e3eca8176795f0b04f2e58b9ba37d3ae27c02a357fbb5a2c07c019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4jmb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:18Z is after 2025-08-24T17:21:41Z" Dec 01 
06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.127606 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.127635 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.127644 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.127658 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.127667 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:18Z","lastTransitionTime":"2025-12-01T06:44:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.229793 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.229820 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.229830 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.229842 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.229851 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:18Z","lastTransitionTime":"2025-12-01T06:44:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.331328 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.331376 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.331385 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.331397 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.331405 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:18Z","lastTransitionTime":"2025-12-01T06:44:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.432879 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.432901 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.432910 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.432920 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.432927 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:18Z","lastTransitionTime":"2025-12-01T06:44:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.534415 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.534453 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.534461 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.534472 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.534479 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:18Z","lastTransitionTime":"2025-12-01T06:44:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.635875 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.635902 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.635911 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.635923 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.635932 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:18Z","lastTransitionTime":"2025-12-01T06:44:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.737922 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.737947 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.737958 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.737969 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.737979 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:18Z","lastTransitionTime":"2025-12-01T06:44:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.749366 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:44:18 crc kubenswrapper[4632]: E1201 06:44:18.749457 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.749483 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:44:18 crc kubenswrapper[4632]: E1201 06:44:18.749552 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.749621 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:44:18 crc kubenswrapper[4632]: E1201 06:44:18.749731 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.839538 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.839559 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.839567 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.839579 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.839587 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:18Z","lastTransitionTime":"2025-12-01T06:44:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.941643 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.941689 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.941698 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.941719 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.941727 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:18Z","lastTransitionTime":"2025-12-01T06:44:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.981693 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zpkn8_45a865b5-e289-4d8f-93d3-007d46f49be9/kube-multus/0.log" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.981740 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zpkn8" event={"ID":"45a865b5-e289-4d8f-93d3-007d46f49be9","Type":"ContainerStarted","Data":"18efcb24d15fbe83fcbe0ada005e6c48ba2ae989572edc668a0df119176a2340"} Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.991720 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\
\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-12-01T06:44:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:18 crc kubenswrapper[4632]: I1201 06:44:18.999832 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:18Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.007869 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.018066 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3608e4738e1a29dbcc71944454085dcb6d9db81026a72449941372b7d11c912f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.024844 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqqbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"559abf1e-dc19-40e9-b75b-9a327d661dc0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqqbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.033095 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.040908 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.043103 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.043125 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.043133 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.043144 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.043152 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:19Z","lastTransitionTime":"2025-12-01T06:44:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.049290 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.056836 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.064123 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.071654 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18efcb24d15fbe83fcbe0ada005e6c48ba2ae989572edc668a0df119176a2340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:44:17Z\\\",\\\"message\\\":\\\"2025-12-01T06:43:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_cebe59f2-1c92-4992-b1a6-1097444c902f\\\\n2025-12-01T06:43:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_cebe59f2-1c92-4992-b1a6-1097444c902f to /host/opt/cni/bin/\\\\n2025-12-01T06:43:32Z [verbose] multus-daemon started\\\\n2025-12-01T06:43:32Z [verbose] Readiness Indicator file check\\\\n2025-12-01T06:44:17Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:44:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.083235 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ea64636d1f9306f4f96cafdecc3b9798d2adfd6456a434808601bdb1f3d2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f45ea64636d1f9306f4f96cafdecc3b9798d2adfd6456a434808601bdb1f3d2b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:43:57Z\\\",\\\"message\\\":\\\"otocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager/controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-controller-manager/controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager/controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.149\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1201 06:43:57.345042 6299 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1201 06:43:57.345044 6299 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-gklnd_openshift-ovn-kubernetes(ac685f74-ea0b-4a05-8018-a68fc1df20cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.090173 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.097471 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e82ce7b-2c24-4aa4-828b-5dc8a606ea6a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://872dd39ba608722c008353bfe8e84990163f105ea8fca770893d2b35c63d51d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6c8a78975e59379ff4a52113b032b96ed7c36cd5005ec56b352ed5c700f2df7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a708cf163fc7fd531bc9170e4a5404a22536fffe4ae2755d6b3d4c12ba7ad72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440
c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7314d923bb8f2b21d40955c1aa5a2aa630a6916d05b64e4919394018db6bd4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7314d923bb8f2b21d40955c1aa5a2aa630a6916d05b64e4919394018db6bd4b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.105101 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.111772 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:19Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.119192 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eeae6ad0d860894ab1c556177b18005e8a17808ee153fdce3798afe99fb1492\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88c81cae08e3eca8176795f0b04f2e58b9ba37d3ae27c02a357fbb5a2c07c019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4jmb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:19Z is after 2025-08-24T17:21:41Z" Dec 01 
06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.144632 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.144661 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.144672 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.144686 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.144694 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:19Z","lastTransitionTime":"2025-12-01T06:44:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.246438 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.246629 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.246688 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.246778 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.246837 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:19Z","lastTransitionTime":"2025-12-01T06:44:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.349081 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.349110 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.349120 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.349132 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.349141 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:19Z","lastTransitionTime":"2025-12-01T06:44:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.450800 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.450834 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.450843 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.450855 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.450863 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:19Z","lastTransitionTime":"2025-12-01T06:44:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.552712 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.552753 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.552763 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.552776 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.552786 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:19Z","lastTransitionTime":"2025-12-01T06:44:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.654631 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.654666 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.654675 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.654686 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.654695 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:19Z","lastTransitionTime":"2025-12-01T06:44:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.749662 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:44:19 crc kubenswrapper[4632]: E1201 06:44:19.749785 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.756089 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.756119 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.756128 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.756138 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.756146 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:19Z","lastTransitionTime":"2025-12-01T06:44:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.858151 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.858191 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.858202 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.858215 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.858224 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:19Z","lastTransitionTime":"2025-12-01T06:44:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.960179 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.960216 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.960225 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.960239 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:19 crc kubenswrapper[4632]: I1201 06:44:19.960247 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:19Z","lastTransitionTime":"2025-12-01T06:44:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.062329 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.062416 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.062426 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.062440 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.062450 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:20Z","lastTransitionTime":"2025-12-01T06:44:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.164640 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.164691 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.164701 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.164714 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.164723 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:20Z","lastTransitionTime":"2025-12-01T06:44:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.266032 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.266773 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.266783 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.266797 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.266806 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:20Z","lastTransitionTime":"2025-12-01T06:44:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.368694 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.368866 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.368875 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.368888 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.368898 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:20Z","lastTransitionTime":"2025-12-01T06:44:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.470389 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.470423 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.470432 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.470444 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.470452 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:20Z","lastTransitionTime":"2025-12-01T06:44:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.571984 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.572025 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.572033 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.572045 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.572052 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:20Z","lastTransitionTime":"2025-12-01T06:44:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.673968 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.674015 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.674024 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.674037 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.674045 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:20Z","lastTransitionTime":"2025-12-01T06:44:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.749721 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.749731 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:44:20 crc kubenswrapper[4632]: E1201 06:44:20.749819 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.749832 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:44:20 crc kubenswrapper[4632]: E1201 06:44:20.749883 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:44:20 crc kubenswrapper[4632]: E1201 06:44:20.749968 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.759409 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"
}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.766948 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabl
ed\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.774740 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.775753 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.775779 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.775787 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.775797 4632 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.775806 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:20Z","lastTransitionTime":"2025-12-01T06:44:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.787205 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3608e4738e1a29dbcc71944454085dcb6d9db81026a72449941372b7d11c912f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed8145
1ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTim
e\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.793901 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqqbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"559abf1e-dc19-40e9-b75b-9a327d661dc0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqqbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.801555 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.809867 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18efcb24d15fbe83fcbe0ada005e6c48ba2ae989572edc668a0df119176a2340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:44:17Z\\\",\\\"message\\\":\\\"2025-12-01T06:43:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_cebe59f2-1c92-4992-b1a6-1097444c902f\\\\n2025-12-01T06:43:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_cebe59f2-1c92-4992-b1a6-1097444c902f to /host/opt/cni/bin/\\\\n2025-12-01T06:43:32Z [verbose] multus-daemon started\\\\n2025-12-01T06:43:32Z [verbose] Readiness Indicator file check\\\\n2025-12-01T06:44:17Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:44:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.821462 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f45ea64636d1f9306f4f96cafdecc3b9798d2adfd6456a434808601bdb1f3d2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f45ea64636d1f9306f4f96cafdecc3b9798d2adfd6456a434808601bdb1f3d2b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:43:57Z\\\",\\\"message\\\":\\\"otocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager/controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-controller-manager/controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager/controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.149\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1201 06:43:57.345042 6299 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1201 06:43:57.345044 6299 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-gklnd_openshift-ovn-kubernetes(ac685f74-ea0b-4a05-8018-a68fc1df20cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.829969 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.838074 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.845557 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.853041 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.859931 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.866762 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e82ce7b-2c24-4aa4-828b-5dc8a606ea6a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://872dd39ba608722c008353bfe8e84990163f105ea8fca770893d2b35c63d51d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6c8a78975e59379ff4a52113b032b96ed7c36cd5005ec56b352ed5c700f2df7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a708cf163fc7fd531bc9170e4a5404a22536fffe4ae2755d6b3d4c12ba7ad72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7314d923bb8f2b21d40955c1aa5a2aa630a6916d05b64e4919394018db6bd4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7314d923bb8f2b21d40955c1aa5a2aa630a6916d05b64e4919394018db6bd4b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.874444 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.877425 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.877457 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.877466 4632 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.877478 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.877486 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:20Z","lastTransitionTime":"2025-12-01T06:44:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.880792 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.887651 4632 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eeae6ad0d860894ab1c556177b18005e8a17808ee153fdce3798afe99fb1492\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88c81cae08e3eca8176795f0b04f2e58b9ba37d3ae27c02a357fbb5a2c07c019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4jmb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-01T06:44:20Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.979377 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.979407 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.979415 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.979427 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:20 crc kubenswrapper[4632]: I1201 06:44:20.979435 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:20Z","lastTransitionTime":"2025-12-01T06:44:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.081537 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.081563 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.081571 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.081584 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.081592 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:21Z","lastTransitionTime":"2025-12-01T06:44:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.183727 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.183759 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.183767 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.183779 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.183787 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:21Z","lastTransitionTime":"2025-12-01T06:44:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.285430 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.285464 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.285473 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.285485 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.285494 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:21Z","lastTransitionTime":"2025-12-01T06:44:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.387235 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.387260 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.387270 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.387298 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.387306 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:21Z","lastTransitionTime":"2025-12-01T06:44:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.489036 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.489060 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.489068 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.489077 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.489106 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:21Z","lastTransitionTime":"2025-12-01T06:44:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.590618 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.590643 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.590651 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.590662 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.590670 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:21Z","lastTransitionTime":"2025-12-01T06:44:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.691943 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.691965 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.691973 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.691982 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.691990 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:21Z","lastTransitionTime":"2025-12-01T06:44:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.749728 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:44:21 crc kubenswrapper[4632]: E1201 06:44:21.749814 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.793903 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.793929 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.793938 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.793950 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.793959 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:21Z","lastTransitionTime":"2025-12-01T06:44:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.895745 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.895780 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.895788 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.895801 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.895811 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:21Z","lastTransitionTime":"2025-12-01T06:44:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.997258 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.997283 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.997291 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.997302 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:21 crc kubenswrapper[4632]: I1201 06:44:21.997310 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:21Z","lastTransitionTime":"2025-12-01T06:44:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.098561 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.098589 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.098597 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.098609 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.098616 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:22Z","lastTransitionTime":"2025-12-01T06:44:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.199874 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.199900 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.199909 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.199920 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.199931 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:22Z","lastTransitionTime":"2025-12-01T06:44:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.301475 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.301507 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.301517 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.301527 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.301536 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:22Z","lastTransitionTime":"2025-12-01T06:44:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.402820 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.402846 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.402855 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.402865 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.402872 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:22Z","lastTransitionTime":"2025-12-01T06:44:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.504245 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.504280 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.504288 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.504310 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.504318 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:22Z","lastTransitionTime":"2025-12-01T06:44:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.606807 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.606844 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.606853 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.606882 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.606893 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:22Z","lastTransitionTime":"2025-12-01T06:44:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.708967 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.708991 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.709023 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.709036 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.709044 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:22Z","lastTransitionTime":"2025-12-01T06:44:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.749962 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.750031 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:44:22 crc kubenswrapper[4632]: E1201 06:44:22.750072 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.750092 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:44:22 crc kubenswrapper[4632]: E1201 06:44:22.750213 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:44:22 crc kubenswrapper[4632]: E1201 06:44:22.750344 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.811038 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.811065 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.811074 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.811084 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.811091 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:22Z","lastTransitionTime":"2025-12-01T06:44:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.912204 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.912227 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.912236 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.912247 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:22 crc kubenswrapper[4632]: I1201 06:44:22.912255 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:22Z","lastTransitionTime":"2025-12-01T06:44:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.014304 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.014334 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.014343 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.014369 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.014378 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:23Z","lastTransitionTime":"2025-12-01T06:44:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.116678 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.116703 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.116711 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.116722 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.116731 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:23Z","lastTransitionTime":"2025-12-01T06:44:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.153883 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.153906 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.153914 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.153922 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.153930 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:23Z","lastTransitionTime":"2025-12-01T06:44:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:23 crc kubenswrapper[4632]: E1201 06:44:23.162840 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:23Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.165191 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.165219 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.165249 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.165262 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.165270 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:23Z","lastTransitionTime":"2025-12-01T06:44:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:23 crc kubenswrapper[4632]: E1201 06:44:23.172734 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.175236 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.175256 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.175264 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.175274 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.175282 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:23Z","lastTransitionTime":"2025-12-01T06:44:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:23 crc kubenswrapper[4632]: E1201 06:44:23.182694 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.184912 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.185016 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.185081 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.185145 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.185206 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:23Z","lastTransitionTime":"2025-12-01T06:44:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:23 crc kubenswrapper[4632]: E1201 06:44:23.193139 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.195271 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.195299 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.195308 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.195320 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.195327 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:23Z","lastTransitionTime":"2025-12-01T06:44:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:23 crc kubenswrapper[4632]: E1201 06:44:23.203042 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:23Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:23 crc kubenswrapper[4632]: E1201 06:44:23.203143 4632 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.217994 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.218034 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.218059 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.218069 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.218077 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:23Z","lastTransitionTime":"2025-12-01T06:44:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.319498 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.319535 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.319549 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.319562 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.319572 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:23Z","lastTransitionTime":"2025-12-01T06:44:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.420996 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.421033 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.421041 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.421051 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.421058 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:23Z","lastTransitionTime":"2025-12-01T06:44:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.522882 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.522913 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.522922 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.522935 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.522964 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:23Z","lastTransitionTime":"2025-12-01T06:44:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.624002 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.624056 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.624067 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.624079 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.624088 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:23Z","lastTransitionTime":"2025-12-01T06:44:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.726210 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.726259 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.726268 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.726281 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.726289 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:23Z","lastTransitionTime":"2025-12-01T06:44:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.749677 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:44:23 crc kubenswrapper[4632]: E1201 06:44:23.749773 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.827330 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.827372 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.827383 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.827393 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.827400 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:23Z","lastTransitionTime":"2025-12-01T06:44:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.928819 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.928841 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.928848 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.928857 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:23 crc kubenswrapper[4632]: I1201 06:44:23.928863 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:23Z","lastTransitionTime":"2025-12-01T06:44:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.030627 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.030648 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.030656 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.030665 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.030672 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:24Z","lastTransitionTime":"2025-12-01T06:44:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.131685 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.131711 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.131719 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.131728 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.131734 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:24Z","lastTransitionTime":"2025-12-01T06:44:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.234110 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.234141 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.234149 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.234161 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.234185 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:24Z","lastTransitionTime":"2025-12-01T06:44:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.336318 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.336350 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.336375 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.336387 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.336396 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:24Z","lastTransitionTime":"2025-12-01T06:44:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.438561 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.438590 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.438597 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.438609 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.438616 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:24Z","lastTransitionTime":"2025-12-01T06:44:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.541026 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.541286 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.541376 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.541439 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.541495 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:24Z","lastTransitionTime":"2025-12-01T06:44:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.643531 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.643563 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.643573 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.643590 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.643599 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:24Z","lastTransitionTime":"2025-12-01T06:44:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.745753 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.745784 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.745791 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.745802 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.745810 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:24Z","lastTransitionTime":"2025-12-01T06:44:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.750182 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.750184 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.750269 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:44:24 crc kubenswrapper[4632]: E1201 06:44:24.750367 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 06:44:24 crc kubenswrapper[4632]: E1201 06:44:24.750513 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 06:44:24 crc kubenswrapper[4632]: E1201 06:44:24.750601 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.847704 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.847738 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.847746 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.847758 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.847766 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:24Z","lastTransitionTime":"2025-12-01T06:44:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.950334 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.950388 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.950397 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.950411 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:44:24 crc kubenswrapper[4632]: I1201 06:44:24.950420 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:24Z","lastTransitionTime":"2025-12-01T06:44:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.052811 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.052847 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.052858 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.052870 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.052879 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:25Z","lastTransitionTime":"2025-12-01T06:44:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.154631 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.154668 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.154677 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.154691 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.154715 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:25Z","lastTransitionTime":"2025-12-01T06:44:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.257150 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.257193 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.257203 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.257219 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.257229 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:25Z","lastTransitionTime":"2025-12-01T06:44:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.358616 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.358648 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.358656 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.358668 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.358676 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:25Z","lastTransitionTime":"2025-12-01T06:44:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.460050 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.460103 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.460112 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.460124 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.460132 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:25Z","lastTransitionTime":"2025-12-01T06:44:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.562114 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.562163 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.562171 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.562183 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.562191 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:25Z","lastTransitionTime":"2025-12-01T06:44:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.664414 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.664447 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.664456 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.664468 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.664477 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:25Z","lastTransitionTime":"2025-12-01T06:44:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.749424 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:44:25 crc kubenswrapper[4632]: E1201 06:44:25.749518 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.749940 4632 scope.go:117] "RemoveContainer" containerID="f45ea64636d1f9306f4f96cafdecc3b9798d2adfd6456a434808601bdb1f3d2b" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.765893 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.765934 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.765942 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.765954 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.765962 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:25Z","lastTransitionTime":"2025-12-01T06:44:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.867801 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.867830 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.867839 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.867852 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.867861 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:25Z","lastTransitionTime":"2025-12-01T06:44:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.969545 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.969580 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.969588 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.969602 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.969612 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:25Z","lastTransitionTime":"2025-12-01T06:44:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:25 crc kubenswrapper[4632]: I1201 06:44:25.997710 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gklnd_ac685f74-ea0b-4a05-8018-a68fc1df20cc/ovnkube-controller/2.log" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.000036 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" event={"ID":"ac685f74-ea0b-4a05-8018-a68fc1df20cc","Type":"ContainerStarted","Data":"22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412"} Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.000348 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.013945 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e82ce7b-2c24-4aa4-828b-5dc8a606ea6a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://872dd39ba608722c008353bfe8e84990163f105ea8fca770893d2b35c63d51d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6c8a78975e59379ff4a52113b032b96ed7c36cd5005ec56b352ed5c700f2df7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a708cf163fc7fd531bc9170e4a5404a2253
6fffe4ae2755d6b3d4c12ba7ad72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7314d923bb8f2b21d40955c1aa5a2aa630a6916d05b64e4919394018db6bd4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7314d923bb8f2b21d40955c1aa5a2aa630a6916d05b64e4919394018db6bd4b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:26Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.023259 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:26Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.030081 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:26Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.037574 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eeae6ad0d860894ab1c556177b18005e8a17808ee153fdce3798afe99fb1492\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88c81cae08e3eca8176795f0b04f2e58b9ba37d3ae27c02a357fbb5a2c07c019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4jmb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:26Z is after 2025-08-24T17:21:41Z" Dec 01 
06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.046643 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:26Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.053603 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:26Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.061419 4632 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:26Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.070980 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.071017 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.071026 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.071038 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.071046 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:26Z","lastTransitionTime":"2025-12-01T06:44:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.071753 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3608e4738e1a29dbcc71944454085dcb6d9db81026a72449941372b7d11c912f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:26Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.079152 4632 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/network-metrics-daemon-nqqbv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"559abf1e-dc19-40e9-b75b-9a327d661dc0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqqbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:26Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.087260 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:26Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.095269 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18efcb24d15fbe83fcbe0ada005e6c48ba2ae989572edc668a0df119176a2340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:44:17Z\\\",\\\"message\\\":\\\"2025-12-01T06:43:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_cebe59f2-1c92-4992-b1a6-1097444c902f\\\\n2025-12-01T06:43:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_cebe59f2-1c92-4992-b1a6-1097444c902f to /host/opt/cni/bin/\\\\n2025-12-01T06:43:32Z [verbose] multus-daemon started\\\\n2025-12-01T06:43:32Z [verbose] Readiness Indicator file check\\\\n2025-12-01T06:44:17Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:44:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:26Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.107209 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f45ea64636d1f9306f4f96cafdecc3b9798d2adfd6456a434808601bdb1f3d2b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:43:57Z\\\",\\\"message\\\":\\\"otocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager/controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-controller-manager/controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager/controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.149\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1201 06:43:57.345042 6299 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1201 06:43:57.345044 6299 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:44:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:26Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.115339 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:26Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.123731 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:26Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.152145 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:26Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.167283 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:26Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.172740 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.172768 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.172778 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.172790 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.172797 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:26Z","lastTransitionTime":"2025-12-01T06:44:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.174564 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:26Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.274282 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.274312 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.274321 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.274335 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.274344 4632 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:26Z","lastTransitionTime":"2025-12-01T06:44:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.376014 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.376053 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.376061 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.376076 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.376084 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:26Z","lastTransitionTime":"2025-12-01T06:44:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.478003 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.478046 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.478054 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.478074 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.478083 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:26Z","lastTransitionTime":"2025-12-01T06:44:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.579499 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.579539 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.579549 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.579562 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.579570 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:26Z","lastTransitionTime":"2025-12-01T06:44:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.680749 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.680785 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.680794 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.680806 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.680813 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:26Z","lastTransitionTime":"2025-12-01T06:44:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.749823 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.749902 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:44:26 crc kubenswrapper[4632]: E1201 06:44:26.749933 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.750045 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:44:26 crc kubenswrapper[4632]: E1201 06:44:26.750109 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:44:26 crc kubenswrapper[4632]: E1201 06:44:26.750194 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.757402 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.782332 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.782376 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.782386 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.782397 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.782405 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:26Z","lastTransitionTime":"2025-12-01T06:44:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.884777 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.884807 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.884815 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.884826 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.884838 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:26Z","lastTransitionTime":"2025-12-01T06:44:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.986224 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.986252 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.986260 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.986272 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:26 crc kubenswrapper[4632]: I1201 06:44:26.986279 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:26Z","lastTransitionTime":"2025-12-01T06:44:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.002951 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gklnd_ac685f74-ea0b-4a05-8018-a68fc1df20cc/ovnkube-controller/3.log" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.003321 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gklnd_ac685f74-ea0b-4a05-8018-a68fc1df20cc/ovnkube-controller/2.log" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.005073 4632 generic.go:334] "Generic (PLEG): container finished" podID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerID="22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412" exitCode=1 Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.005144 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" event={"ID":"ac685f74-ea0b-4a05-8018-a68fc1df20cc","Type":"ContainerDied","Data":"22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412"} Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.005175 4632 scope.go:117] "RemoveContainer" containerID="f45ea64636d1f9306f4f96cafdecc3b9798d2adfd6456a434808601bdb1f3d2b" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.005563 4632 scope.go:117] "RemoveContainer" containerID="22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412" Dec 01 06:44:27 crc kubenswrapper[4632]: E1201 06:44:27.005691 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-gklnd_openshift-ovn-kubernetes(ac685f74-ea0b-4a05-8018-a68fc1df20cc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.014929 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:27Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.024176 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3608e4738e1a29dbcc71944454085dcb6d9db81026a72449941372b7d11c912f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:27Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.030698 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqqbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"559abf1e-dc19-40e9-b75b-9a327d661dc0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqqbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:27Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.042752 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f45ea64636d1f9306f4f96cafdecc3b9798d2adfd6456a434808601bdb1f3d2b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:43:57Z\\\",\\\"message\\\":\\\"otocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager/controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-controller-manager/controller-manager_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-controller-manager/controller-manager\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.149\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1201 06:43:57.345042 6299 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb after 0 failed attempt(s)\\\\nI1201 06:43:57.345044 6299 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:44:26Z\\\",\\\"message\\\":\\\"ing zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1201 06:44:26.333935 6696 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1201 06:44:26.333943 6696 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1201 06:44:26.333941 6696 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1201 06:44:26.333951 6696 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1201 06:44:26.333952 6696 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1201 06:44:26.333960 6696 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1201 06:44:26.333985 6696 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nF1201 06:44:26.333913 6696 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:44:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\
":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:27Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.051570 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:27Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.061141 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:27Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.069071 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:27Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.076802 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:27Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.085173 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:27Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.087451 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.087487 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.087497 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.087513 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.087521 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:27Z","lastTransitionTime":"2025-12-01T06:44:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.095241 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18efcb24d15fbe83fcbe0ada005e6c48ba2ae989572edc668a0df119176a2340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:44:17Z\\\",\\\"message\\\":\\\"2025-12-01T06:43:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_cebe59f2-1c92-4992-b1a6-1097444c902f\\\\n2025-12-01T06:43:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_cebe59f2-1c92-4992-b1a6-1097444c902f to /host/opt/cni/bin/\\\\n2025-12-01T06:43:32Z [verbose] multus-daemon started\\\\n2025-12-01T06:43:32Z [verbose] Readiness Indicator file check\\\\n2025-12-01T06:44:17Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:44:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:27Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.101521 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:27Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.109495 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e82ce7b-2c24-4aa4-828b-5dc8a606ea6a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://872dd39ba608722c008353bfe8e84990163f105ea8fca770893d2b35c63d51d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6c8a78975e59379ff4a52113b032b96ed7c36cd5005ec56b352ed5c700f2df7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a708cf163fc7fd531bc9170e4a5404a22536fffe4ae2755d6b3d4c12ba7ad72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7314d923bb8f2b21d40955c1aa5a2aa630a6916d05b64e4919394018db6bd4b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7314d923bb8f2b21d40955c1aa5a2aa630a6916d05b64e4919394018db6bd4b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:27Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.115894 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae4aa25e-9e9f-4eb2-aae7-c34257645b5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50d96c5ef61204ce6f65f27235cb5073bc6fc18c20aaa694143cfd9d32c8631f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fbf8fb6184a7243098cb443f0531f3cab7aacfe935f4f139e91194c07945e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-de
v@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fbf8fb6184a7243098cb443f0531f3cab7aacfe935f4f139e91194c07945e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:27Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.126363 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T
06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:27Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.132866 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:27Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:27 crc 
kubenswrapper[4632]: I1201 06:44:27.139742 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eeae6ad0d860894ab1c556177b18005e8a17808ee153fdce3798afe99fb1492\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88c81cae08e3eca8176795f0b04f2e58b9ba37d3ae27c02a357fbb5a2c07c019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4jmb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:27Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.148048 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:27Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.154988 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:27Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.189525 4632 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.189554 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.189572 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.189584 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.189591 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:27Z","lastTransitionTime":"2025-12-01T06:44:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.291242 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.291275 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.291283 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.291296 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.291305 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:27Z","lastTransitionTime":"2025-12-01T06:44:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.392579 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.392611 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.392621 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.392632 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.392640 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:27Z","lastTransitionTime":"2025-12-01T06:44:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.494650 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.494678 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.494688 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.494699 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.494707 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:27Z","lastTransitionTime":"2025-12-01T06:44:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.596814 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.596838 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.596845 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.596854 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.596861 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:27Z","lastTransitionTime":"2025-12-01T06:44:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.699052 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.699094 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.699104 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.699120 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.699129 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:27Z","lastTransitionTime":"2025-12-01T06:44:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.749650 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:44:27 crc kubenswrapper[4632]: E1201 06:44:27.749741 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.800505 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.800530 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.800539 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.800550 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.800557 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:27Z","lastTransitionTime":"2025-12-01T06:44:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.902807 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.902842 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.902850 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.902862 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:27 crc kubenswrapper[4632]: I1201 06:44:27.902870 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:27Z","lastTransitionTime":"2025-12-01T06:44:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.004505 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.004546 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.004554 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.004572 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.004581 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:28Z","lastTransitionTime":"2025-12-01T06:44:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.008519 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gklnd_ac685f74-ea0b-4a05-8018-a68fc1df20cc/ovnkube-controller/3.log" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.010891 4632 scope.go:117] "RemoveContainer" containerID="22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412" Dec 01 06:44:28 crc kubenswrapper[4632]: E1201 06:44:28.011047 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-gklnd_openshift-ovn-kubernetes(ac685f74-ea0b-4a05-8018-a68fc1df20cc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.019343 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:28Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.027729 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:28Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.036055 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18efcb24d15fbe83fcbe0ada005e6c48ba2ae989572edc668a0df119176a2340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:44:17Z\\\",\\\"message\\\":\\\"2025-12-01T06:43:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_cebe59f2-1c92-4992-b1a6-1097444c902f\\\\n2025-12-01T06:43:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_cebe59f2-1c92-4992-b1a6-1097444c902f to /host/opt/cni/bin/\\\\n2025-12-01T06:43:32Z [verbose] multus-daemon started\\\\n2025-12-01T06:43:32Z [verbose] Readiness Indicator file check\\\\n2025-12-01T06:44:17Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:44:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:28Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.047923 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:44:26Z\\\",\\\"message\\\":\\\"ing zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1201 06:44:26.333935 6696 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1201 06:44:26.333943 6696 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1201 06:44:26.333941 6696 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1201 06:44:26.333951 6696 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1201 06:44:26.333952 6696 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1201 06:44:26.333960 6696 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1201 06:44:26.333985 6696 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nF1201 06:44:26.333913 6696 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:44:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-gklnd_openshift-ovn-kubernetes(ac685f74-ea0b-4a05-8018-a68fc1df20cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:28Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.055593 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:28Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.064289 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:28Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.071695 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:28Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.077971 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:28Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.086000 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eeae6ad0d860894ab1c556177b18005e8a17808ee153fdce3798afe99fb1492\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88c81cae08e3eca8176795f0b04f2e58b9ba37d3ae27c02a357fbb5a2c07c019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:41Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4jmb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:28Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.093412 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e82ce7b-2c24-4aa4-828b-5dc8a606ea6a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://872dd39ba608722c008353bfe8e84990163f105ea8fca770893d2b35c63d51d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6c8a78975e59379ff4a52113b032b96ed7c36cd5005ec56b352ed5c700f2df7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a708cf163fc7fd531bc9170e4a5404a22536fffe4ae2755d6b3d4c12ba7ad72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-contr
oller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7314d923bb8f2b21d40955c1aa5a2aa630a6916d05b64e4919394018db6bd4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7314d923bb8f2b21d40955c1aa5a2aa630a6916d05b64e4919394018db6bd4b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:28Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.099751 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae4aa25e-9e9f-4eb2-aae7-c34257645b5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50d96c5ef61204ce6f65f27235cb5073bc6fc18c20aaa694143cfd9d32c8631f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fbf8fb6184a7243098cb443f0531f3cab7aacfe935f4f139e91194c07945e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fbf8fb6184a7243098cb443f0531f3cab7aacfe935f4f139e91194c07945e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:28Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.106318 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.106373 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.106382 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.106393 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.106400 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:28Z","lastTransitionTime":"2025-12-01T06:44:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.107635 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"o
vnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:28Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.113750 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:28Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.122691 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:28Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.130058 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:28Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.137093 4632 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:28Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.145974 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3608e4738e1a29dbcc71944454085dcb6d9db81026a72449941372b7d11c912f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:28Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.152404 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqqbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"559abf1e-dc19-40e9-b75b-9a327d661dc0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqqbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:28Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.208045 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.208072 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.208081 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.208093 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.208102 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:28Z","lastTransitionTime":"2025-12-01T06:44:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.310176 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.310217 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.310227 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.310242 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.310252 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:28Z","lastTransitionTime":"2025-12-01T06:44:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.412099 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.412137 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.412147 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.412160 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.412168 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:28Z","lastTransitionTime":"2025-12-01T06:44:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.513885 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.513919 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.513927 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.513940 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.513947 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:28Z","lastTransitionTime":"2025-12-01T06:44:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.616255 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.616291 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.616300 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.616311 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.616322 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:28Z","lastTransitionTime":"2025-12-01T06:44:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.718201 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.718234 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.718242 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.718253 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.718261 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:28Z","lastTransitionTime":"2025-12-01T06:44:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.749663 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.749731 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:44:28 crc kubenswrapper[4632]: E1201 06:44:28.749772 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:44:28 crc kubenswrapper[4632]: E1201 06:44:28.749843 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.749903 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:44:28 crc kubenswrapper[4632]: E1201 06:44:28.750027 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.820761 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.820790 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.820801 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.820812 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.820822 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:28Z","lastTransitionTime":"2025-12-01T06:44:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.922606 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.922638 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.922647 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.922658 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:28 crc kubenswrapper[4632]: I1201 06:44:28.922667 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:28Z","lastTransitionTime":"2025-12-01T06:44:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.024778 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.024827 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.024836 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.024848 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.024858 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:29Z","lastTransitionTime":"2025-12-01T06:44:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.126418 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.126451 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.126460 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.126473 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.126481 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:29Z","lastTransitionTime":"2025-12-01T06:44:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.228750 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.228934 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.228942 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.228953 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.228961 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:29Z","lastTransitionTime":"2025-12-01T06:44:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.330761 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.330790 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.330799 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.330810 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.330819 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:29Z","lastTransitionTime":"2025-12-01T06:44:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.432811 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.432847 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.432855 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.432868 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.432877 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:29Z","lastTransitionTime":"2025-12-01T06:44:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.534681 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.534727 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.534736 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.534747 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.534756 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:29Z","lastTransitionTime":"2025-12-01T06:44:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.636338 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.636393 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.636403 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.636415 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.636424 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:29Z","lastTransitionTime":"2025-12-01T06:44:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.737854 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.737881 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.737888 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.737899 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.737906 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:29Z","lastTransitionTime":"2025-12-01T06:44:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.749155 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:44:29 crc kubenswrapper[4632]: E1201 06:44:29.749242 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.839874 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.839904 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.839911 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.839922 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.839929 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:29Z","lastTransitionTime":"2025-12-01T06:44:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.942125 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.942155 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.942164 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.942174 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:29 crc kubenswrapper[4632]: I1201 06:44:29.942182 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:29Z","lastTransitionTime":"2025-12-01T06:44:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.044980 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.045031 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.045041 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.045054 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.045061 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:30Z","lastTransitionTime":"2025-12-01T06:44:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.146314 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.146380 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.146388 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.146399 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.146406 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:30Z","lastTransitionTime":"2025-12-01T06:44:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.248701 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.248747 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.248755 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.248767 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.248775 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:30Z","lastTransitionTime":"2025-12-01T06:44:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.350567 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.350593 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.350601 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.350611 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.350619 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:30Z","lastTransitionTime":"2025-12-01T06:44:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.452662 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.452708 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.452720 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.452731 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.452738 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:30Z","lastTransitionTime":"2025-12-01T06:44:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.554662 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.554693 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.554701 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.554714 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.554722 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:30Z","lastTransitionTime":"2025-12-01T06:44:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.656835 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.656875 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.656883 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.656900 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.656912 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:30Z","lastTransitionTime":"2025-12-01T06:44:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.749685 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.749740 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:44:30 crc kubenswrapper[4632]: E1201 06:44:30.749770 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:44:30 crc kubenswrapper[4632]: E1201 06:44:30.749843 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.749881 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:44:30 crc kubenswrapper[4632]: E1201 06:44:30.749919 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.758553 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.758579 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.758587 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.758596 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.758603 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:30Z","lastTransitionTime":"2025-12-01T06:44:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.759334 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e82ce7b-2c24-4aa4-828b-5dc8a606ea6a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://872dd39ba608722c008353bfe8e84990163f105ea8fca770893d2b35c63d51d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6c8a78975e59379ff4a52113b032b96ed7c36cd5005ec56b352ed5c700f2df7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a93
80066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a708cf163fc7fd531bc9170e4a5404a22536fffe4ae2755d6b3d4c12ba7ad72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7314d923bb8f2b21d40955c1aa5a2aa630a6916d05b64e4919394018db6bd4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7314d923bb8f2b21d40955c1aa5a2aa630a6916d05b64e4919394018db6bd4b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.766135 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae4aa25e-9e9f-4eb2-aae7-c34257645b5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50d96c5ef61204ce6f65f27235cb5073bc6fc18c20aaa694143cfd9d32c8631f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fbf8fb6184a7243098cb443f0531f3cab7aacfe935f4f139e91194c07945e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fbf8fb6184a7243098cb443f0531f3cab7aacfe935f4f139e91194c07945e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.774225 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.780407 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.787365 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eeae6ad0d860894ab1c556177b18005e8a17808ee153fdce3798afe99fb1492\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88c81cae08e3eca8176795f0b04f2e58b9ba37d3ae27c02a357fbb5a2c07c019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4jmb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:30Z is after 2025-08-24T17:21:41Z" Dec 01 
06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.798693 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\
\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.806100 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.813825 4632 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.824561 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3608e4738e1a29dbcc71944454085dcb6d9db81026a72449941372b7d11c912f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.831327 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqqbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"559abf1e-dc19-40e9-b75b-9a327d661dc0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqqbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.843722 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:44:26Z\\\",\\\"message\\\":\\\"ing zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1201 06:44:26.333935 6696 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1201 06:44:26.333943 6696 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1201 06:44:26.333941 6696 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1201 06:44:26.333951 6696 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1201 06:44:26.333952 6696 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1201 06:44:26.333960 6696 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1201 06:44:26.333985 6696 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nF1201 06:44:26.333913 6696 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:44:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-gklnd_openshift-ovn-kubernetes(ac685f74-ea0b-4a05-8018-a68fc1df20cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.852004 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.859803 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.860243 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.860326 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.860420 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.860491 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.860544 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:30Z","lastTransitionTime":"2025-12-01T06:44:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.867248 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.874963 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.882921 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.891162 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18efcb24d15fbe83fcbe0ada005e6c48ba2ae989572edc668a0df119176a2340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:44:17Z\\\",\\\"message\\\":\\\"2025-12-01T06:43:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_cebe59f2-1c92-4992-b1a6-1097444c902f\\\\n2025-12-01T06:43:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_cebe59f2-1c92-4992-b1a6-1097444c902f to /host/opt/cni/bin/\\\\n2025-12-01T06:43:32Z [verbose] multus-daemon started\\\\n2025-12-01T06:43:32Z [verbose] Readiness Indicator file check\\\\n2025-12-01T06:44:17Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:44:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.898258 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hq8m4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ede6d987-8ead-4c6e-8492-655f67bb0476\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7947326acbf37eb7b74cbe098867c788061e2a0d8ddffecfe28a3391c67f58b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5ds6p\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:33Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hq8m4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:30Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.963447 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.963484 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.963495 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.963509 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:30 crc kubenswrapper[4632]: I1201 06:44:30.963520 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:30Z","lastTransitionTime":"2025-12-01T06:44:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.065753 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.065781 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.065790 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.065802 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.065810 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:31Z","lastTransitionTime":"2025-12-01T06:44:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.167273 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.167298 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.167306 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.167317 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.167342 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:31Z","lastTransitionTime":"2025-12-01T06:44:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.268987 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.269031 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.269039 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.269051 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.269059 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:31Z","lastTransitionTime":"2025-12-01T06:44:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.371046 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.371233 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.371292 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.371399 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.371456 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:31Z","lastTransitionTime":"2025-12-01T06:44:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.473863 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.474072 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.474162 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.474222 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.474283 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:31Z","lastTransitionTime":"2025-12-01T06:44:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.575843 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.575897 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.575907 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.575919 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.575928 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:31Z","lastTransitionTime":"2025-12-01T06:44:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.677808 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.677884 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.677894 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.677910 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.677917 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:31Z","lastTransitionTime":"2025-12-01T06:44:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.750149 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:44:31 crc kubenswrapper[4632]: E1201 06:44:31.750426 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.779962 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.779992 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.780000 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.780021 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.780031 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:31Z","lastTransitionTime":"2025-12-01T06:44:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.882079 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.882116 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.882127 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.882139 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.882147 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:31Z","lastTransitionTime":"2025-12-01T06:44:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.984170 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.984208 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.984216 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.984229 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:31 crc kubenswrapper[4632]: I1201 06:44:31.984236 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:31Z","lastTransitionTime":"2025-12-01T06:44:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.086848 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.086893 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.086904 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.086918 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.086934 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:32Z","lastTransitionTime":"2025-12-01T06:44:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.188781 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.188832 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.188840 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.188852 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.188860 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:32Z","lastTransitionTime":"2025-12-01T06:44:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.291390 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.291429 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.291438 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.291452 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.291460 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:32Z","lastTransitionTime":"2025-12-01T06:44:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.392610 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.392645 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.392655 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.392666 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.392675 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:32Z","lastTransitionTime":"2025-12-01T06:44:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.494623 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.494660 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.494668 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.494681 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.494690 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:32Z","lastTransitionTime":"2025-12-01T06:44:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.596605 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.596645 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.596653 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.596667 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.596677 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:32Z","lastTransitionTime":"2025-12-01T06:44:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.626991 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:44:32 crc kubenswrapper[4632]: E1201 06:44:32.627087 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:36.627068219 +0000 UTC m=+146.192081192 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.698678 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.698703 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.698711 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.698723 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.698731 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:32Z","lastTransitionTime":"2025-12-01T06:44:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.728438 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.728490 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.728515 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.728557 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:44:32 crc kubenswrapper[4632]: E1201 06:44:32.728593 4632 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object 
"openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:44:32 crc kubenswrapper[4632]: E1201 06:44:32.728634 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:45:36.728622825 +0000 UTC m=+146.293635788 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 01 06:44:32 crc kubenswrapper[4632]: E1201 06:44:32.728638 4632 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 06:44:32 crc kubenswrapper[4632]: E1201 06:44:32.728593 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 06:44:32 crc kubenswrapper[4632]: E1201 06:44:32.728693 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-01 06:45:36.728681346 +0000 UTC m=+146.293694319 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 01 06:44:32 crc kubenswrapper[4632]: E1201 06:44:32.728694 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 01 06:44:32 crc kubenswrapper[4632]: E1201 06:44:32.728710 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 06:44:32 crc kubenswrapper[4632]: E1201 06:44:32.728719 4632 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 01 06:44:32 crc kubenswrapper[4632]: E1201 06:44:32.728728 4632 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:44:32 crc kubenswrapper[4632]: E1201 06:44:32.728732 4632 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:44:32 crc kubenswrapper[4632]: E1201 06:44:32.728773 4632 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-01 06:45:36.728760084 +0000 UTC m=+146.293773058 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:44:32 crc kubenswrapper[4632]: E1201 06:44:32.728789 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-01 06:45:36.728782557 +0000 UTC m=+146.293795530 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.749423 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:44:32 crc kubenswrapper[4632]: E1201 06:44:32.749527 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.749549 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.749616 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:44:32 crc kubenswrapper[4632]: E1201 06:44:32.749622 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:44:32 crc kubenswrapper[4632]: E1201 06:44:32.749751 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.800065 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.800093 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.800102 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.800113 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.800121 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:32Z","lastTransitionTime":"2025-12-01T06:44:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.902182 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.902226 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.902236 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.902250 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:32 crc kubenswrapper[4632]: I1201 06:44:32.902260 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:32Z","lastTransitionTime":"2025-12-01T06:44:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.004506 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.004540 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.004550 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.004562 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.004571 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:33Z","lastTransitionTime":"2025-12-01T06:44:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.106087 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.106119 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.106129 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.106141 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.106149 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:33Z","lastTransitionTime":"2025-12-01T06:44:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.207613 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.207644 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.207670 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.207696 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.207705 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:33Z","lastTransitionTime":"2025-12-01T06:44:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.241971 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.242008 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.242027 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.242042 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.242052 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:33Z","lastTransitionTime":"2025-12-01T06:44:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:33 crc kubenswrapper[4632]: E1201 06:44:33.250763 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.253261 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.253296 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.253307 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.253319 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.253327 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:33Z","lastTransitionTime":"2025-12-01T06:44:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:33 crc kubenswrapper[4632]: E1201 06:44:33.261226 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.263348 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.263425 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.263436 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.263449 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.263458 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:33Z","lastTransitionTime":"2025-12-01T06:44:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:33 crc kubenswrapper[4632]: E1201 06:44:33.271629 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.274043 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.274069 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.274076 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.274087 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.274096 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:33Z","lastTransitionTime":"2025-12-01T06:44:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:33 crc kubenswrapper[4632]: E1201 06:44:33.282225 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:33Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.284035 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.284060 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.284069 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.284078 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.284085 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:33Z","lastTransitionTime":"2025-12-01T06:44:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:33 crc kubenswrapper[4632]: E1201 06:44:33.291434 4632 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404552Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865352Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"8ffed684-78e5-4500-8de8-2b1680e680bb\\\",\\\"systemUUID\\\":\\\"1c5c4d3f-f0f9-4630-9fc3-9603e9ccfe24\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:33Z is after 2025-08-24T17:21:41Z"
Dec 01 06:44:33 crc kubenswrapper[4632]: E1201 06:44:33.291551 4632 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.309343 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
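Note: the node-status update above is rejected because the node.network-node-identity webhook is serving an expired certificate (NotAfter 2025-08-24T17:21:41Z, current time 2025-12-01T06:44:33Z). A minimal Go sketch for reading the validity window of whatever certificate that endpoint serves; the 127.0.0.1:9743 address is taken from the webhook URL in the log, and everything else here is illustrative, not part of the kubelet.

package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// Address taken from the webhook URL in the log line above; adjust as needed.
	addr := "127.0.0.1:9743"
	// InsecureSkipVerify lets us read the served certificate even though
	// normal verification (as attempted by the kubelet) fails.
	conn, err := tls.Dial("tcp", addr, &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatalf("dial %s: %v", addr, err)
	}
	defer conn.Close()
	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("subject:   %s\n", cert.Subject)
	fmt.Printf("notBefore: %s\n", cert.NotBefore.Format(time.RFC3339))
	fmt.Printf("notAfter:  %s\n", cert.NotAfter.Format(time.RFC3339))
	fmt.Printf("expired now: %v\n", time.Now().After(cert.NotAfter))
}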
event="NodeHasSufficientMemory" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.309387 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.309396 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.309408 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.309418 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:33Z","lastTransitionTime":"2025-12-01T06:44:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.411100 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.411134 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.411145 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.411158 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.411168 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:33Z","lastTransitionTime":"2025-12-01T06:44:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.513151 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.513182 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.513190 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.513200 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.513208 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:33Z","lastTransitionTime":"2025-12-01T06:44:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.513208 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:33Z","lastTransitionTime":"2025-12-01T06:44:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.615403 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.615434 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.615442 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.615454 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.615462 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:33Z","lastTransitionTime":"2025-12-01T06:44:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.717558 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.717589 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.717598 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.717609 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.717618 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:33Z","lastTransitionTime":"2025-12-01T06:44:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.749970 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv"
Dec 01 06:44:33 crc kubenswrapper[4632]: E1201 06:44:33.750073 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0"
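Note: every NotReady heartbeat and pod-sync failure in this stretch reduces to the same condition: the kubelet finds no CNI configuration in /etc/kubernetes/cni/net.d/. A minimal Go sketch of that directory check, assuming the conventional *.conf/*.conflist/*.json naming; this mirrors the log message, not the kubelet's actual implementation.

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	// Directory taken from the log message; the extension list is the
	// conventional CNI set and is an assumption here.
	dir := "/etc/kubernetes/cni/net.d"
	var found []string
	for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
		matches, err := filepath.Glob(filepath.Join(dir, pat))
		if err != nil {
			fmt.Fprintln(os.Stderr, "bad pattern:", err)
			continue
		}
		found = append(found, matches...)
	}
	if len(found) == 0 {
		fmt.Printf("no CNI configuration file in %s (network provider not started?)\n", dir)
		os.Exit(1)
	}
	for _, f := range found {
		fmt.Println(f)
	}
}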
pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.819562 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.819711 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.819771 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.819825 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.819874 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:33Z","lastTransitionTime":"2025-12-01T06:44:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.921653 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.921697 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.921706 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.921721 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:33 crc kubenswrapper[4632]: I1201 06:44:33.921732 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:33Z","lastTransitionTime":"2025-12-01T06:44:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.023073 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.023100 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.023108 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.023119 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.023127 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:34Z","lastTransitionTime":"2025-12-01T06:44:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.125483 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.125506 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.125514 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.125525 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.125533 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:34Z","lastTransitionTime":"2025-12-01T06:44:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.227375 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.227408 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.227416 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.227429 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.227436 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:34Z","lastTransitionTime":"2025-12-01T06:44:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.329000 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.329053 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.329063 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.329078 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.329088 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:34Z","lastTransitionTime":"2025-12-01T06:44:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.430790 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.430826 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.430836 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.430849 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.430857 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:34Z","lastTransitionTime":"2025-12-01T06:44:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.532410 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.532449 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.532458 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.532472 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.532481 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:34Z","lastTransitionTime":"2025-12-01T06:44:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.634236 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.634272 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.634281 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.634292 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.634304 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:34Z","lastTransitionTime":"2025-12-01T06:44:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.736379 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.736405 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.736413 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.736426 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.736434 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:34Z","lastTransitionTime":"2025-12-01T06:44:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.750061 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.750061 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.750065 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:44:34 crc kubenswrapper[4632]: E1201 06:44:34.750289 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:44:34 crc kubenswrapper[4632]: E1201 06:44:34.750372 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:44:34 crc kubenswrapper[4632]: E1201 06:44:34.750440 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.838133 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.838172 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.838180 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.838192 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.838201 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:34Z","lastTransitionTime":"2025-12-01T06:44:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.940424 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.940472 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.940482 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.940493 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:34 crc kubenswrapper[4632]: I1201 06:44:34.940502 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:34Z","lastTransitionTime":"2025-12-01T06:44:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.042110 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.042147 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.042155 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.042167 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.042177 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:35Z","lastTransitionTime":"2025-12-01T06:44:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.143753 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.143802 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.143813 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.143826 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.143853 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:35Z","lastTransitionTime":"2025-12-01T06:44:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.246313 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.246348 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.246381 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.246394 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.246404 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:35Z","lastTransitionTime":"2025-12-01T06:44:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.348231 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.348264 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.348272 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.348286 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.348294 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:35Z","lastTransitionTime":"2025-12-01T06:44:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.450129 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.450161 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.450169 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.450180 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.450201 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:35Z","lastTransitionTime":"2025-12-01T06:44:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.552483 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.552513 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.552521 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.552532 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.552540 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:35Z","lastTransitionTime":"2025-12-01T06:44:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.654174 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.654226 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.654234 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.654245 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.654254 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:35Z","lastTransitionTime":"2025-12-01T06:44:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.750154 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:44:35 crc kubenswrapper[4632]: E1201 06:44:35.750272 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.756531 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.756560 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.756568 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.756580 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.756590 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:35Z","lastTransitionTime":"2025-12-01T06:44:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.858398 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.858434 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.858444 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.858457 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.858467 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:35Z","lastTransitionTime":"2025-12-01T06:44:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.960046 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.960535 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.960610 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.960671 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:35 crc kubenswrapper[4632]: I1201 06:44:35.960738 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:35Z","lastTransitionTime":"2025-12-01T06:44:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.064874 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.064911 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.064921 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.064936 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.064946 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:36Z","lastTransitionTime":"2025-12-01T06:44:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.166734 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.166761 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.166769 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.166781 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.166789 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:36Z","lastTransitionTime":"2025-12-01T06:44:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.268702 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.268729 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.268737 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.268749 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.268758 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:36Z","lastTransitionTime":"2025-12-01T06:44:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.371070 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.371159 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.371174 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.371262 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.371277 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:36Z","lastTransitionTime":"2025-12-01T06:44:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.473175 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.473209 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.473217 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.473231 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.473239 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:36Z","lastTransitionTime":"2025-12-01T06:44:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.574909 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.574940 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.574948 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.574960 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.574969 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:36Z","lastTransitionTime":"2025-12-01T06:44:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.676896 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.676942 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.676952 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.676964 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.676974 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:36Z","lastTransitionTime":"2025-12-01T06:44:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.750206 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.750220 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.750286 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:44:36 crc kubenswrapper[4632]: E1201 06:44:36.750380 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:44:36 crc kubenswrapper[4632]: E1201 06:44:36.750489 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:44:36 crc kubenswrapper[4632]: E1201 06:44:36.750546 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.778403 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.778450 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.778460 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.778470 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.778479 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:36Z","lastTransitionTime":"2025-12-01T06:44:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.880810 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.880865 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.880874 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.880886 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.880894 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:36Z","lastTransitionTime":"2025-12-01T06:44:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.982654 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.982677 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.982684 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.982694 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:36 crc kubenswrapper[4632]: I1201 06:44:36.982703 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:36Z","lastTransitionTime":"2025-12-01T06:44:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.083740 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.083773 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.083782 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.083795 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.083803 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:37Z","lastTransitionTime":"2025-12-01T06:44:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.186674 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.186707 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.186716 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.186731 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.186740 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:37Z","lastTransitionTime":"2025-12-01T06:44:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.288711 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.288761 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.288769 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.288782 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.288797 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:37Z","lastTransitionTime":"2025-12-01T06:44:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.390176 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.390215 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.390223 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.390234 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.390243 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:37Z","lastTransitionTime":"2025-12-01T06:44:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.492565 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.492603 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.492612 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.492625 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.492635 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:37Z","lastTransitionTime":"2025-12-01T06:44:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.594699 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.594755 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.594764 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.594776 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.594785 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:37Z","lastTransitionTime":"2025-12-01T06:44:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.696799 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.696852 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.696861 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.696874 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.696883 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:37Z","lastTransitionTime":"2025-12-01T06:44:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.749924 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:44:37 crc kubenswrapper[4632]: E1201 06:44:37.750049 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.798891 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.798944 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.798953 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.798965 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.798974 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:37Z","lastTransitionTime":"2025-12-01T06:44:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.901184 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.901241 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.901253 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.901267 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:37 crc kubenswrapper[4632]: I1201 06:44:37.901277 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:37Z","lastTransitionTime":"2025-12-01T06:44:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.003294 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.003331 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.003340 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.003379 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.003390 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:38Z","lastTransitionTime":"2025-12-01T06:44:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.105684 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.105739 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.105750 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.105764 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.105772 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:38Z","lastTransitionTime":"2025-12-01T06:44:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.207321 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.207380 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.207391 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.207403 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.207411 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:38Z","lastTransitionTime":"2025-12-01T06:44:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.309238 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.309276 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.309285 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.309297 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.309306 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:38Z","lastTransitionTime":"2025-12-01T06:44:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.411517 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.411572 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.411581 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.411598 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.411607 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:38Z","lastTransitionTime":"2025-12-01T06:44:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.513288 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.513327 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.513339 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.513373 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.513387 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:38Z","lastTransitionTime":"2025-12-01T06:44:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.615054 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.615091 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.615102 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.615116 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.615123 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:38Z","lastTransitionTime":"2025-12-01T06:44:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.716658 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.716693 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.716701 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.716712 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.716722 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:38Z","lastTransitionTime":"2025-12-01T06:44:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.749212 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.749471 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.749535 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:44:38 crc kubenswrapper[4632]: E1201 06:44:38.749862 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:44:38 crc kubenswrapper[4632]: E1201 06:44:38.750120 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:44:38 crc kubenswrapper[4632]: E1201 06:44:38.750581 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.761751 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.818737 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.818763 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.818771 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.818781 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.818789 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:38Z","lastTransitionTime":"2025-12-01T06:44:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.920707 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.921069 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.921130 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.921187 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:38 crc kubenswrapper[4632]: I1201 06:44:38.921252 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:38Z","lastTransitionTime":"2025-12-01T06:44:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.023134 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.023584 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.023677 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.023762 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.023830 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:39Z","lastTransitionTime":"2025-12-01T06:44:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.126386 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.126428 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.126437 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.126450 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.126461 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:39Z","lastTransitionTime":"2025-12-01T06:44:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.227901 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.227929 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.227937 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.227948 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.227958 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:39Z","lastTransitionTime":"2025-12-01T06:44:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.329896 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.330139 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.330231 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.330435 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.330594 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:39Z","lastTransitionTime":"2025-12-01T06:44:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.432788 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.432944 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.433040 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.433101 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.433161 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:39Z","lastTransitionTime":"2025-12-01T06:44:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.534659 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.534688 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.534698 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.534710 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.534718 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:39Z","lastTransitionTime":"2025-12-01T06:44:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.636199 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.636393 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.636454 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.636511 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.636605 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:39Z","lastTransitionTime":"2025-12-01T06:44:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.738078 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.738213 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.738272 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.738335 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.738417 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:39Z","lastTransitionTime":"2025-12-01T06:44:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.749626 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:44:39 crc kubenswrapper[4632]: E1201 06:44:39.749716 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.840525 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.840552 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.840567 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.840577 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.840584 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:39Z","lastTransitionTime":"2025-12-01T06:44:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.942015 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.942049 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.942057 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.942068 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:39 crc kubenswrapper[4632]: I1201 06:44:39.942075 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:39Z","lastTransitionTime":"2025-12-01T06:44:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.043559 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.043584 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.043592 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.043601 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.043608 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:40Z","lastTransitionTime":"2025-12-01T06:44:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.144916 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.144965 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.144973 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.144985 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.144995 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:40Z","lastTransitionTime":"2025-12-01T06:44:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.247159 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.247292 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.247405 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.247474 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.247575 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:40Z","lastTransitionTime":"2025-12-01T06:44:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.349261 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.349404 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.349499 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.349559 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.349626 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:40Z","lastTransitionTime":"2025-12-01T06:44:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.450672 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.450700 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.450709 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.450717 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.450724 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:40Z","lastTransitionTime":"2025-12-01T06:44:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.552210 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.552315 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.552397 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.552462 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.552513 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:40Z","lastTransitionTime":"2025-12-01T06:44:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.654652 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.654706 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.654714 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.654726 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.654734 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:40Z","lastTransitionTime":"2025-12-01T06:44:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.749615 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:44:40 crc kubenswrapper[4632]: E1201 06:44:40.749705 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.749720 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:44:40 crc kubenswrapper[4632]: E1201 06:44:40.749816 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.749825 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:44:40 crc kubenswrapper[4632]: E1201 06:44:40.749973 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.750461 4632 scope.go:117] "RemoveContainer" containerID="22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412" Dec 01 06:44:40 crc kubenswrapper[4632]: E1201 06:44:40.750586 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-gklnd_openshift-ovn-kubernetes(ac685f74-ea0b-4a05-8018-a68fc1df20cc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.755735 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.755764 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.755774 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.755785 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.755792 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:40Z","lastTransitionTime":"2025-12-01T06:44:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.758565 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6e82ce7b-2c24-4aa4-828b-5dc8a606ea6a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://872dd39ba608722c008353bfe8e84990163f105ea8fca770893d2b35c63d51d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c6c8a78975e59379ff4a52113b032b96ed7c36cd5005ec56b352ed5c700f2df7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a708cf163fc7fd531bc9170e4a5404a22536fffe4ae2755d6b3d4c12ba7ad72\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b7314d923bb8f2b21d40955c1aa5a2aa630a6916d05b64e4919394018db6bd4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b7314d923bb8f2b21d40955c1aa5a2aa630a6916d05b64e4919394018db6bd4b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.765056 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae4aa25e-9e9f-4eb2-aae7-c34257645b5a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://50d96c5ef61204ce6f65f27235cb5073bc6fc18c20aaa694143cfd9d32c8631f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4fbf8fb6184a7243098cb443f
0531f3cab7aacfe935f4f139e91194c07945e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4fbf8fb6184a7243098cb443f0531f3cab7aacfe935f4f139e91194c07945e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.775252 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7347c403def01d9765d220697dc3262a2e441afab4d9edc5f9aea083df74d354\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e993577872e1a5ed0882aadbb13c9d6d4704fe3e8c652fd9d09ebc9a4ceabd60\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.781604 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-49pcd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c73ddd8-a673-4565-a30b-5b4d5b708edd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://676e9208ae678d0481bd8fcaa29f3d537bcdad65e790fabf0d33d6663fe03d80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pxnk7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-49pcd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.789267 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b835e6d-98fc-4bfb-bb49-a463c40c06f9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1eeae6ad0d860894ab1c556177b18005e8a17808ee153fdce3798afe99fb1492\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://88c81cae08e3eca8176795f0b04f2e58b9ba37d3ae27c02a357fbb5a2c07c019\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zf6z5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startT
ime\\\":\\\"2025-12-01T06:43:41Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-4jmb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.797194 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e05a383f-53c3-4f52-9c31-1b73ff3122b6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4
f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 
06:44:40.804499 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"168bb8aa-8b43-44df-836f-90d6d52f1539\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://448e0609375ee156d1124a50f79c851f0ea878844de9a74ecfc8b4709c0e1cba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ftxfc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-gm9xs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-12-01T06:44:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.811608 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://818f485102829582de37218cf2baaebf1f9f85b1451ebaf172ba0e06627aead0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.821011 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c2ef6994-166c-4195-af3f-a55124a4c441\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3608e4738e1a29dbcc71944454085dcb6d9db81026a72449941372b7d11c912f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://97a4a239f2ac00b95a734aed5571dec6d75cb1aa69e753685f3c9da626b12238\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61454f09d060c657b5209444bbcebc5f0d055d48f8db36b734bee1631cc50da3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://851cd32ab3ead99576a0c082d18af4fa422132bf3310d8500f3203c69032fd46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4d5f3c48b88e539303a89290798d15cb42817668550e7579930cb78c88594a1c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9d89cd2bbba16a4539ed841127929b8a37cf78d3ef211a3e700b5848149a369e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://621852ac6d0f59b821bc52861acc28c760ae1472952a16e38dbf6a5f3a52c647\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qzg5n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-rs4h5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.828707 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nqqbv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"559abf1e-dc19-40e9-b75b-9a327d661dc0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:43Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qmv4x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:43Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nqqbv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.836004 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.843711 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-zpkn8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45a865b5-e289-4d8f-93d3-007d46f49be9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:44:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://18efcb24d15fbe83fcbe0ada005e6c48ba2ae989572edc668a0df119176a2340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:44:17Z\\\",\\\"message\\\":\\\"2025-12-01T06:43:32+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_cebe59f2-1c92-4992-b1a6-1097444c902f\\\\n2025-12-01T06:43:32+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_cebe59f2-1c92-4992-b1a6-1097444c902f to /host/opt/cni/bin/\\\\n2025-12-01T06:43:32Z [verbose] multus-daemon started\\\\n2025-12-01T06:43:32Z [verbose] Readiness Indicator file check\\\\n2025-12-01T06:44:17Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:44:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zp9r6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-multus\"/\"multus-zpkn8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.855602 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ac685f74-ea0b-4a05-8018-a68fc1df20cc\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:30Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-01T06:44:26Z\\\",\\\"message\\\":\\\"ing zone local for Pod openshift-network-operator/iptables-alerter-4ln5h in node crc\\\\nI1201 06:44:26.333935 6696 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1201 06:44:26.333943 6696 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1201 06:44:26.333941 6696 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1201 06:44:26.333951 6696 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1201 06:44:26.333952 6696 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI1201 06:44:26.333960 6696 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1201 06:44:26.333985 6696 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nF1201 06:44:26.333913 6696 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-01T06:44:25Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-gklnd_openshift-ovn-kubernetes(ac685f74-ea0b-4a05-8018-a68fc1df20cc)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-622pg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:30Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-gklnd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.857599 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.857627 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.857636 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.857649 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.857657 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:40Z","lastTransitionTime":"2025-12-01T06:44:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.868423 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ff1ca79f-5470-4eb2-a9d1-ee5b04bb67cb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://205d7d446d5e154ed8ace14bee0d57ce0659c4ba33106a48c49fb1048ffd0018\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e573f82e576ea344fc02cc74cba66b722e8e1e7402197f169f3bdb0f575f9231\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://07a44b45a108c7863728a8fb8bd0e37ce8fd37af4d2e4afb9e032e059c1e981d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7009150a923be5d8fa956d16f8538d6229318b2187c19c8063b6ef0279a2e716\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe9f3c85414e07c1ece6673f05f3fff0a0ff5b03db1c85b6ec24b3945ca5f8e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d2c4b50f55f202752999be170a734f2bf6c1c7059177bc5c70bf32b3bbe3ea3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0d2c4b50f55f202752999be170a734f2bf6c1c7059177bc5c70bf32b3bbe3ea3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8001a593b9645f20604d901f2c917a86ea61544de553eeb8a76e69ca3235a9e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8001a593b9645f20604d901f2c917a86ea61544de553eeb8a76e69ca3235a9e7\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-01T06:43:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://9811922604b7c9c3d16684362ff5029b692e21f4aa234a1f77131dcade7b1baf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9811922604b7c9c3d16684362ff5029b692e21f4aa234a1f77131dcade7b1baf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.875815 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b6acb373-fed7-4c33-b6b1-3fbbb793505e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3dbccad0a202f1a2037221cf3b57996a3395c2d9b235eb2d380234807c46fd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://46699d8fbb5454eee031e8753621d98689c0b0dc8dd4b79b50b695fa1042edfc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0ec91a79e54524d7e9d75431cadd6ea963150bdcc3795c0d036a5fd5d3cdf32\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-01T06:43:10Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.883422 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:29Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0b836df087121ec93df5fe4ba3a341a4f5a40a986d30ce527bfc1040334e6df3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-01T06:43:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.891117 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.898477 4632 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-01T06:43:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-01T06:44:40Z is after 2025-08-24T17:21:41Z" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.912245 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-hq8m4" podStartSLOduration=71.912237048 podStartE2EDuration="1m11.912237048s" podCreationTimestamp="2025-12-01 06:43:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:44:40.912170072 +0000 UTC m=+90.477183046" watchObservedRunningTime="2025-12-01 06:44:40.912237048 +0000 UTC m=+90.477250021" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.959887 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.959936 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.959946 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.959958 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:40 crc kubenswrapper[4632]: I1201 06:44:40.959966 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:40Z","lastTransitionTime":"2025-12-01T06:44:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.061575 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.061604 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.061631 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.061648 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.061655 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:41Z","lastTransitionTime":"2025-12-01T06:44:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.163819 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.163847 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.163855 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.163868 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.163877 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:41Z","lastTransitionTime":"2025-12-01T06:44:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.268253 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.268291 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.268317 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.268333 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.268344 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:41Z","lastTransitionTime":"2025-12-01T06:44:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.370482 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.370510 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.370518 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.370529 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.370539 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:41Z","lastTransitionTime":"2025-12-01T06:44:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.471791 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.471821 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.471829 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.471841 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.471850 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:41Z","lastTransitionTime":"2025-12-01T06:44:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.573119 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.573250 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.573307 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.573384 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.573439 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:41Z","lastTransitionTime":"2025-12-01T06:44:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.674693 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.674723 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.674731 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.674742 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.674750 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:41Z","lastTransitionTime":"2025-12-01T06:44:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.750169 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:44:41 crc kubenswrapper[4632]: E1201 06:44:41.750270 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.776965 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.776987 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.776995 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.777007 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.777016 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:41Z","lastTransitionTime":"2025-12-01T06:44:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.878854 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.878879 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.878887 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.878898 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.878906 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:41Z","lastTransitionTime":"2025-12-01T06:44:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.980632 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.980666 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.980674 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.980686 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:41 crc kubenswrapper[4632]: I1201 06:44:41.980696 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:41Z","lastTransitionTime":"2025-12-01T06:44:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.082416 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.082445 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.082454 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.082465 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.082472 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:42Z","lastTransitionTime":"2025-12-01T06:44:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.184843 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.184899 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.184908 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.184918 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.184924 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:42Z","lastTransitionTime":"2025-12-01T06:44:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.286869 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.286914 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.286924 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.286940 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.286951 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:42Z","lastTransitionTime":"2025-12-01T06:44:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.389109 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.389138 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.389147 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.389159 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.389167 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:42Z","lastTransitionTime":"2025-12-01T06:44:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.491517 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.491548 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.491555 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.491568 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.491577 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:42Z","lastTransitionTime":"2025-12-01T06:44:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.593880 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.593914 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.593923 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.593935 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.593944 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:42Z","lastTransitionTime":"2025-12-01T06:44:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.695803 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.695834 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.695843 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.695853 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.695862 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:42Z","lastTransitionTime":"2025-12-01T06:44:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.749701 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.749762 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:44:42 crc kubenswrapper[4632]: E1201 06:44:42.749805 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.749832 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:44:42 crc kubenswrapper[4632]: E1201 06:44:42.749926 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:44:42 crc kubenswrapper[4632]: E1201 06:44:42.750004 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.797500 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.797522 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.797531 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.797541 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.797549 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:42Z","lastTransitionTime":"2025-12-01T06:44:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.898814 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.898840 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.898849 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.898860 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:42 crc kubenswrapper[4632]: I1201 06:44:42.898878 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:42Z","lastTransitionTime":"2025-12-01T06:44:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.000496 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.000520 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.000527 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.000549 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.000560 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:43Z","lastTransitionTime":"2025-12-01T06:44:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.101799 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.101846 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.101863 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.101875 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.101883 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:43Z","lastTransitionTime":"2025-12-01T06:44:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.203476 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.203506 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.203515 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.203523 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.203530 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:43Z","lastTransitionTime":"2025-12-01T06:44:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.304998 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.305051 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.305073 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.305090 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.305098 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:43Z","lastTransitionTime":"2025-12-01T06:44:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.406157 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.406191 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.406202 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.406213 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.406222 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:43Z","lastTransitionTime":"2025-12-01T06:44:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.450403 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.450427 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.450436 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.450454 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.450462 4632 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-01T06:44:43Z","lastTransitionTime":"2025-12-01T06:44:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.478423 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-w6zf4"]
Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.478697 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w6zf4"
Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.479882 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.479917 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.479882 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.480531 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.497925 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-rs4h5" podStartSLOduration=73.497912507 podStartE2EDuration="1m13.497912507s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:44:43.497909642 +0000 UTC m=+93.062922615" watchObservedRunningTime="2025-12-01 06:44:43.497912507 +0000 UTC m=+93.062925481"
Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.521192 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=5.521177099 podStartE2EDuration="5.521177099s" podCreationTimestamp="2025-12-01 06:44:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:44:43.520285919 +0000 UTC m=+93.085298892" watchObservedRunningTime="2025-12-01 06:44:43.521177099 +0000 UTC m=+93.086190072"
Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.539836 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=74.539822856 podStartE2EDuration="1m14.539822856s" podCreationTimestamp="2025-12-01 06:43:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:44:43.531163217 +0000 UTC m=+93.096176190" watchObservedRunningTime="2025-12-01 06:44:43.539822856 +0000 UTC m=+93.104835829"
Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.587438 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-zpkn8" podStartSLOduration=74.587423609 podStartE2EDuration="1m14.587423609s" podCreationTimestamp="2025-12-01 06:43:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:44:43.572514823 +0000 UTC m=+93.137527796" watchObservedRunningTime="2025-12-01 06:44:43.587423609 +0000 UTC m=+93.152436581"
Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.595580 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=42.595567936 podStartE2EDuration="42.595567936s" podCreationTimestamp="2025-12-01 06:44:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:44:43.594881372 +0000 UTC m=+93.159894345" watchObservedRunningTime="2025-12-01 06:44:43.595567936 +0000 UTC m=+93.160580910"
Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.611462 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=17.611447441 podStartE2EDuration="17.611447441s" podCreationTimestamp="2025-12-01 06:44:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:44:43.603017395 +0000 UTC m=+93.168030378" watchObservedRunningTime="2025-12-01 06:44:43.611447441 +0000 UTC m=+93.176460414"
Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.619417 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/6ee18a79-6fce-411f-a57d-bfb84fb891e7-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-w6zf4\" (UID: \"6ee18a79-6fce-411f-a57d-bfb84fb891e7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w6zf4"
Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.619493 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ee18a79-6fce-411f-a57d-bfb84fb891e7-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-w6zf4\" (UID: \"6ee18a79-6fce-411f-a57d-bfb84fb891e7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w6zf4"
Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.619520 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6ee18a79-6fce-411f-a57d-bfb84fb891e7-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-w6zf4\" (UID: \"6ee18a79-6fce-411f-a57d-bfb84fb891e7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w6zf4"
Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.619565 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6ee18a79-6fce-411f-a57d-bfb84fb891e7-service-ca\") pod \"cluster-version-operator-5c965bbfc6-w6zf4\" (UID: \"6ee18a79-6fce-411f-a57d-bfb84fb891e7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w6zf4"
Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.619584 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/6ee18a79-6fce-411f-a57d-bfb84fb891e7-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-w6zf4\" (UID: \"6ee18a79-6fce-411f-a57d-bfb84fb891e7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w6zf4"
Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.626342 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-4jmb9" podStartSLOduration=73.626329094 podStartE2EDuration="1m13.626329094s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:44:43.62610381 +0000 UTC m=+93.191116783" watchObservedRunningTime="2025-12-01 06:44:43.626329094 +0000 UTC m=+93.191342068"
Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.626517 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-49pcd" podStartSLOduration=74.626511969 podStartE2EDuration="1m14.626511969s" podCreationTimestamp="2025-12-01 06:43:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:44:43.618127078 +0000 UTC m=+93.183140051" watchObservedRunningTime="2025-12-01 06:44:43.626511969 +0000 UTC m=+93.191524942"
Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.637287 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=76.637278979 podStartE2EDuration="1m16.637278979s" podCreationTimestamp="2025-12-01 06:43:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:44:43.636491055 +0000 UTC m=+93.201504028" watchObservedRunningTime="2025-12-01 06:44:43.637278979 +0000 UTC m=+93.202291952"
Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.644299 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podStartSLOduration=74.644287728 podStartE2EDuration="1m14.644287728s" podCreationTimestamp="2025-12-01 06:43:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:44:43.643906909 +0000 UTC m=+93.208919882" watchObservedRunningTime="2025-12-01 06:44:43.644287728 +0000 UTC m=+93.209300700"
Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.720264 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/6ee18a79-6fce-411f-a57d-bfb84fb891e7-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-w6zf4\" (UID: \"6ee18a79-6fce-411f-a57d-bfb84fb891e7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w6zf4"
\"kubernetes.io/host-path/6ee18a79-6fce-411f-a57d-bfb84fb891e7-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-w6zf4\" (UID: \"6ee18a79-6fce-411f-a57d-bfb84fb891e7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w6zf4" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.720310 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6ee18a79-6fce-411f-a57d-bfb84fb891e7-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-w6zf4\" (UID: \"6ee18a79-6fce-411f-a57d-bfb84fb891e7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w6zf4" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.720326 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ee18a79-6fce-411f-a57d-bfb84fb891e7-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-w6zf4\" (UID: \"6ee18a79-6fce-411f-a57d-bfb84fb891e7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w6zf4" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.720378 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6ee18a79-6fce-411f-a57d-bfb84fb891e7-service-ca\") pod \"cluster-version-operator-5c965bbfc6-w6zf4\" (UID: \"6ee18a79-6fce-411f-a57d-bfb84fb891e7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w6zf4" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.720398 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/6ee18a79-6fce-411f-a57d-bfb84fb891e7-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-w6zf4\" (UID: \"6ee18a79-6fce-411f-a57d-bfb84fb891e7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w6zf4" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.720405 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/6ee18a79-6fce-411f-a57d-bfb84fb891e7-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-w6zf4\" (UID: \"6ee18a79-6fce-411f-a57d-bfb84fb891e7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w6zf4" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.720476 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/6ee18a79-6fce-411f-a57d-bfb84fb891e7-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-w6zf4\" (UID: \"6ee18a79-6fce-411f-a57d-bfb84fb891e7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w6zf4" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.721238 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6ee18a79-6fce-411f-a57d-bfb84fb891e7-service-ca\") pod \"cluster-version-operator-5c965bbfc6-w6zf4\" (UID: \"6ee18a79-6fce-411f-a57d-bfb84fb891e7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w6zf4" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.724640 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ee18a79-6fce-411f-a57d-bfb84fb891e7-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-w6zf4\" (UID: 
\"6ee18a79-6fce-411f-a57d-bfb84fb891e7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w6zf4" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.733591 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6ee18a79-6fce-411f-a57d-bfb84fb891e7-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-w6zf4\" (UID: \"6ee18a79-6fce-411f-a57d-bfb84fb891e7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w6zf4" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.749456 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:44:43 crc kubenswrapper[4632]: E1201 06:44:43.749548 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:44:43 crc kubenswrapper[4632]: I1201 06:44:43.787983 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w6zf4" Dec 01 06:44:43 crc kubenswrapper[4632]: W1201 06:44:43.796981 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6ee18a79_6fce_411f_a57d_bfb84fb891e7.slice/crio-4f9f19ee70c0c1b2e93562132f676ca4f01aeda2a94a020d9bb4fec4d29ea1e4 WatchSource:0}: Error finding container 4f9f19ee70c0c1b2e93562132f676ca4f01aeda2a94a020d9bb4fec4d29ea1e4: Status 404 returned error can't find the container with id 4f9f19ee70c0c1b2e93562132f676ca4f01aeda2a94a020d9bb4fec4d29ea1e4 Dec 01 06:44:44 crc kubenswrapper[4632]: I1201 06:44:44.044536 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w6zf4" event={"ID":"6ee18a79-6fce-411f-a57d-bfb84fb891e7","Type":"ContainerStarted","Data":"746eb4b3ca4077cde320fe692538e3ca8948ae14f506c879e1120caf345c1d5e"} Dec 01 06:44:44 crc kubenswrapper[4632]: I1201 06:44:44.044582 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w6zf4" event={"ID":"6ee18a79-6fce-411f-a57d-bfb84fb891e7","Type":"ContainerStarted","Data":"4f9f19ee70c0c1b2e93562132f676ca4f01aeda2a94a020d9bb4fec4d29ea1e4"} Dec 01 06:44:44 crc kubenswrapper[4632]: I1201 06:44:44.052782 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-w6zf4" podStartSLOduration=75.052770042 podStartE2EDuration="1m15.052770042s" podCreationTimestamp="2025-12-01 06:43:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:44:44.052534527 +0000 UTC m=+93.617547500" watchObservedRunningTime="2025-12-01 06:44:44.052770042 +0000 UTC m=+93.617783015" Dec 01 06:44:44 crc kubenswrapper[4632]: I1201 06:44:44.749392 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:44:44 crc kubenswrapper[4632]: I1201 06:44:44.749698 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:44:44 crc kubenswrapper[4632]: E1201 06:44:44.749728 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:44:44 crc kubenswrapper[4632]: I1201 06:44:44.749775 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:44:44 crc kubenswrapper[4632]: E1201 06:44:44.749874 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:44:44 crc kubenswrapper[4632]: E1201 06:44:44.749980 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:44:45 crc kubenswrapper[4632]: I1201 06:44:45.750149 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:44:45 crc kubenswrapper[4632]: E1201 06:44:45.750427 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:44:46 crc kubenswrapper[4632]: I1201 06:44:46.749583 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:44:46 crc kubenswrapper[4632]: E1201 06:44:46.749845 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:44:46 crc kubenswrapper[4632]: I1201 06:44:46.749683 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:44:46 crc kubenswrapper[4632]: I1201 06:44:46.749583 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:44:46 crc kubenswrapper[4632]: E1201 06:44:46.750043 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:44:46 crc kubenswrapper[4632]: E1201 06:44:46.750153 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:44:47 crc kubenswrapper[4632]: I1201 06:44:47.652626 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/559abf1e-dc19-40e9-b75b-9a327d661dc0-metrics-certs\") pod \"network-metrics-daemon-nqqbv\" (UID: \"559abf1e-dc19-40e9-b75b-9a327d661dc0\") " pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:44:47 crc kubenswrapper[4632]: E1201 06:44:47.652743 4632 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 06:44:47 crc kubenswrapper[4632]: E1201 06:44:47.652785 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/559abf1e-dc19-40e9-b75b-9a327d661dc0-metrics-certs podName:559abf1e-dc19-40e9-b75b-9a327d661dc0 nodeName:}" failed. No retries permitted until 2025-12-01 06:45:51.652774769 +0000 UTC m=+161.217787742 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/559abf1e-dc19-40e9-b75b-9a327d661dc0-metrics-certs") pod "network-metrics-daemon-nqqbv" (UID: "559abf1e-dc19-40e9-b75b-9a327d661dc0") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 01 06:44:47 crc kubenswrapper[4632]: I1201 06:44:47.750120 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:44:47 crc kubenswrapper[4632]: E1201 06:44:47.750215 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:44:48 crc kubenswrapper[4632]: I1201 06:44:48.749752 4632 util.go:30] "No sandbox for pod can be found. 
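[Editorial aside: the nestedpendingoperations record above shows kubelet's exponential backoff for failed volume mounts — each consecutive failure doubles the wait, and the logged durationBeforeRetry of 1m4s (64s) is what a doubling ladder produces after eight failures if it starts at 500ms. The 500ms initial delay and the 2m2s cap below are assumptions about kubelet defaults, not values read from this log. A Go sketch of the ladder:]

// A hypothetical illustration of doubling retry delays, not kubelet code.
package main

import (
	"fmt"
	"time"
)

func main() {
	delay := 500 * time.Millisecond      // assumed initial delay
	maxDelay := 2*time.Minute + 2*time.Second // assumed cap
	for failure := 1; failure <= 10; failure++ {
		fmt.Printf("failure %2d -> wait %v\n", failure, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
	// failure 8 prints "wait 1m4s", matching durationBeforeRetry above.
}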
Dec 01 06:44:48 crc kubenswrapper[4632]: E1201 06:44:48.749830 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 06:44:48 crc kubenswrapper[4632]: I1201 06:44:48.749907 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:44:48 crc kubenswrapper[4632]: I1201 06:44:48.749761 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:44:48 crc kubenswrapper[4632]: E1201 06:44:48.750008 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 06:44:48 crc kubenswrapper[4632]: E1201 06:44:48.750187 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 06:44:49 crc kubenswrapper[4632]: I1201 06:44:49.749425 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv"
Dec 01 06:44:49 crc kubenswrapper[4632]: E1201 06:44:49.749542 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0"
Dec 01 06:44:50 crc kubenswrapper[4632]: I1201 06:44:50.750116 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:44:50 crc kubenswrapper[4632]: I1201 06:44:50.750177 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:44:50 crc kubenswrapper[4632]: E1201 06:44:50.751444 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 06:44:50 crc kubenswrapper[4632]: I1201 06:44:50.751469 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:44:50 crc kubenswrapper[4632]: E1201 06:44:50.751645 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 06:44:50 crc kubenswrapper[4632]: E1201 06:44:50.751545 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 06:44:51 crc kubenswrapper[4632]: I1201 06:44:51.749559 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv"
Dec 01 06:44:51 crc kubenswrapper[4632]: E1201 06:44:51.749666 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0"
Dec 01 06:44:52 crc kubenswrapper[4632]: I1201 06:44:52.749415 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:44:52 crc kubenswrapper[4632]: I1201 06:44:52.749461 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:44:52 crc kubenswrapper[4632]: E1201 06:44:52.749526 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 06:44:52 crc kubenswrapper[4632]: I1201 06:44:52.749546 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:44:52 crc kubenswrapper[4632]: E1201 06:44:52.749604 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 06:44:52 crc kubenswrapper[4632]: E1201 06:44:52.749671 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 06:44:53 crc kubenswrapper[4632]: I1201 06:44:53.749891 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv"
Dec 01 06:44:53 crc kubenswrapper[4632]: E1201 06:44:53.749979 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0"
Dec 01 06:44:54 crc kubenswrapper[4632]: I1201 06:44:54.749802 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:44:54 crc kubenswrapper[4632]: I1201 06:44:54.749966 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:44:54 crc kubenswrapper[4632]: I1201 06:44:54.749978 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:44:54 crc kubenswrapper[4632]: E1201 06:44:54.750346 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 06:44:54 crc kubenswrapper[4632]: E1201 06:44:54.750456 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 06:44:54 crc kubenswrapper[4632]: I1201 06:44:54.750473 4632 scope.go:117] "RemoveContainer" containerID="22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412"
Dec 01 06:44:54 crc kubenswrapper[4632]: E1201 06:44:54.750533 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 06:44:54 crc kubenswrapper[4632]: E1201 06:44:54.750693 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-gklnd_openshift-ovn-kubernetes(ac685f74-ea0b-4a05-8018-a68fc1df20cc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc"
Dec 01 06:44:55 crc kubenswrapper[4632]: I1201 06:44:55.749525 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv"
Dec 01 06:44:55 crc kubenswrapper[4632]: E1201 06:44:55.749627 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0"
Dec 01 06:44:56 crc kubenswrapper[4632]: I1201 06:44:56.750014 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:44:56 crc kubenswrapper[4632]: E1201 06:44:56.750599 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 06:44:56 crc kubenswrapper[4632]: I1201 06:44:56.750080 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:44:56 crc kubenswrapper[4632]: E1201 06:44:56.750816 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 06:44:56 crc kubenswrapper[4632]: I1201 06:44:56.750043 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:44:56 crc kubenswrapper[4632]: E1201 06:44:56.751010 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 06:44:57 crc kubenswrapper[4632]: I1201 06:44:57.750053 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv"
Dec 01 06:44:57 crc kubenswrapper[4632]: E1201 06:44:57.750167 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0"
Dec 01 06:44:58 crc kubenswrapper[4632]: I1201 06:44:58.749821 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:44:58 crc kubenswrapper[4632]: I1201 06:44:58.749858 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:44:58 crc kubenswrapper[4632]: I1201 06:44:58.749866 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:44:58 crc kubenswrapper[4632]: E1201 06:44:58.749945 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 06:44:58 crc kubenswrapper[4632]: E1201 06:44:58.750089 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 06:44:58 crc kubenswrapper[4632]: E1201 06:44:58.750137 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 06:44:59 crc kubenswrapper[4632]: I1201 06:44:59.749145 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv"
Dec 01 06:44:59 crc kubenswrapper[4632]: E1201 06:44:59.749252 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0"
Dec 01 06:45:00 crc kubenswrapper[4632]: I1201 06:45:00.750048 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:45:00 crc kubenswrapper[4632]: I1201 06:45:00.750063 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:45:00 crc kubenswrapper[4632]: E1201 06:45:00.750763 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 06:45:00 crc kubenswrapper[4632]: I1201 06:45:00.750789 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:45:00 crc kubenswrapper[4632]: E1201 06:45:00.750842 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 06:45:00 crc kubenswrapper[4632]: E1201 06:45:00.751029 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 06:45:01 crc kubenswrapper[4632]: I1201 06:45:01.749765 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv"
Dec 01 06:45:01 crc kubenswrapper[4632]: E1201 06:45:01.749900 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0"
Dec 01 06:45:02 crc kubenswrapper[4632]: I1201 06:45:02.749831 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:45:02 crc kubenswrapper[4632]: E1201 06:45:02.749936 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 06:45:02 crc kubenswrapper[4632]: I1201 06:45:02.750001 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:45:02 crc kubenswrapper[4632]: I1201 06:45:02.750027 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:45:02 crc kubenswrapper[4632]: E1201 06:45:02.750131 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 01 06:45:02 crc kubenswrapper[4632]: E1201 06:45:02.750215 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 01 06:45:03 crc kubenswrapper[4632]: I1201 06:45:03.750001 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv"
Dec 01 06:45:03 crc kubenswrapper[4632]: E1201 06:45:03.750104 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0"
Dec 01 06:45:04 crc kubenswrapper[4632]: I1201 06:45:04.086234 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zpkn8_45a865b5-e289-4d8f-93d3-007d46f49be9/kube-multus/1.log"
Dec 01 06:45:04 crc kubenswrapper[4632]: I1201 06:45:04.086519 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zpkn8_45a865b5-e289-4d8f-93d3-007d46f49be9/kube-multus/0.log"
Dec 01 06:45:04 crc kubenswrapper[4632]: I1201 06:45:04.086548 4632 generic.go:334] "Generic (PLEG): container finished" podID="45a865b5-e289-4d8f-93d3-007d46f49be9" containerID="18efcb24d15fbe83fcbe0ada005e6c48ba2ae989572edc668a0df119176a2340" exitCode=1
Dec 01 06:45:04 crc kubenswrapper[4632]: I1201 06:45:04.086570 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zpkn8" event={"ID":"45a865b5-e289-4d8f-93d3-007d46f49be9","Type":"ContainerDied","Data":"18efcb24d15fbe83fcbe0ada005e6c48ba2ae989572edc668a0df119176a2340"}
Dec 01 06:45:04 crc kubenswrapper[4632]: I1201 06:45:04.086595 4632 scope.go:117] "RemoveContainer" containerID="40aba2ef54873f104c888f2853e1187443a141c4e3b76c5223c6615de8c471e8"
Dec 01 06:45:04 crc kubenswrapper[4632]: I1201 06:45:04.086857 4632 scope.go:117] "RemoveContainer" containerID="18efcb24d15fbe83fcbe0ada005e6c48ba2ae989572edc668a0df119176a2340"
Dec 01 06:45:04 crc kubenswrapper[4632]: E1201 06:45:04.086969 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-zpkn8_openshift-multus(45a865b5-e289-4d8f-93d3-007d46f49be9)\"" pod="openshift-multus/multus-zpkn8" podUID="45a865b5-e289-4d8f-93d3-007d46f49be9"
Dec 01 06:45:04 crc kubenswrapper[4632]: I1201 06:45:04.750230 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:45:04 crc kubenswrapper[4632]: I1201 06:45:04.750244 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:45:04 crc kubenswrapper[4632]: I1201 06:45:04.750239 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:45:04 crc kubenswrapper[4632]: E1201 06:45:04.750341 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 01 06:45:04 crc kubenswrapper[4632]: E1201 06:45:04.750437 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:45:04 crc kubenswrapper[4632]: E1201 06:45:04.750538 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:45:05 crc kubenswrapper[4632]: I1201 06:45:05.089465 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zpkn8_45a865b5-e289-4d8f-93d3-007d46f49be9/kube-multus/1.log" Dec 01 06:45:05 crc kubenswrapper[4632]: I1201 06:45:05.749932 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:45:05 crc kubenswrapper[4632]: E1201 06:45:05.750019 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:45:05 crc kubenswrapper[4632]: I1201 06:45:05.750460 4632 scope.go:117] "RemoveContainer" containerID="22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412" Dec 01 06:45:05 crc kubenswrapper[4632]: E1201 06:45:05.750610 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-gklnd_openshift-ovn-kubernetes(ac685f74-ea0b-4a05-8018-a68fc1df20cc)\"" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" Dec 01 06:45:06 crc kubenswrapper[4632]: I1201 06:45:06.749470 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:45:06 crc kubenswrapper[4632]: E1201 06:45:06.749563 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:45:06 crc kubenswrapper[4632]: I1201 06:45:06.749599 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:45:06 crc kubenswrapper[4632]: I1201 06:45:06.749671 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:45:06 crc kubenswrapper[4632]: E1201 06:45:06.749717 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:45:06 crc kubenswrapper[4632]: E1201 06:45:06.749790 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:45:07 crc kubenswrapper[4632]: I1201 06:45:07.749631 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:45:07 crc kubenswrapper[4632]: E1201 06:45:07.749732 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:45:08 crc kubenswrapper[4632]: I1201 06:45:08.749385 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:45:08 crc kubenswrapper[4632]: I1201 06:45:08.749455 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:45:08 crc kubenswrapper[4632]: E1201 06:45:08.749496 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:45:08 crc kubenswrapper[4632]: E1201 06:45:08.749617 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:45:08 crc kubenswrapper[4632]: I1201 06:45:08.749667 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:45:08 crc kubenswrapper[4632]: E1201 06:45:08.749799 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:45:09 crc kubenswrapper[4632]: I1201 06:45:09.749703 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:45:09 crc kubenswrapper[4632]: E1201 06:45:09.749812 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:45:10 crc kubenswrapper[4632]: I1201 06:45:10.749241 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:45:10 crc kubenswrapper[4632]: I1201 06:45:10.749301 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:45:10 crc kubenswrapper[4632]: I1201 06:45:10.749321 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:45:10 crc kubenswrapper[4632]: E1201 06:45:10.749918 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:45:10 crc kubenswrapper[4632]: E1201 06:45:10.749991 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:45:10 crc kubenswrapper[4632]: E1201 06:45:10.750068 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:45:10 crc kubenswrapper[4632]: E1201 06:45:10.806592 4632 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 01 06:45:10 crc kubenswrapper[4632]: E1201 06:45:10.816108 4632 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 01 06:45:11 crc kubenswrapper[4632]: I1201 06:45:11.749211 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:45:11 crc kubenswrapper[4632]: E1201 06:45:11.749329 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:45:12 crc kubenswrapper[4632]: I1201 06:45:12.749775 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:45:12 crc kubenswrapper[4632]: I1201 06:45:12.749826 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:45:12 crc kubenswrapper[4632]: I1201 06:45:12.750417 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:45:12 crc kubenswrapper[4632]: E1201 06:45:12.750533 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:45:12 crc kubenswrapper[4632]: E1201 06:45:12.750637 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:45:12 crc kubenswrapper[4632]: E1201 06:45:12.750780 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:45:13 crc kubenswrapper[4632]: I1201 06:45:13.749532 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:45:13 crc kubenswrapper[4632]: E1201 06:45:13.749648 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:45:14 crc kubenswrapper[4632]: I1201 06:45:14.749906 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:45:14 crc kubenswrapper[4632]: I1201 06:45:14.749972 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:45:14 crc kubenswrapper[4632]: I1201 06:45:14.749993 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:45:14 crc kubenswrapper[4632]: E1201 06:45:14.750401 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:45:14 crc kubenswrapper[4632]: E1201 06:45:14.750209 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:45:14 crc kubenswrapper[4632]: E1201 06:45:14.750449 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:45:15 crc kubenswrapper[4632]: I1201 06:45:15.749864 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:45:15 crc kubenswrapper[4632]: E1201 06:45:15.749981 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:45:15 crc kubenswrapper[4632]: E1201 06:45:15.817100 4632 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 01 06:45:16 crc kubenswrapper[4632]: I1201 06:45:16.750206 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:45:16 crc kubenswrapper[4632]: I1201 06:45:16.750252 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:45:16 crc kubenswrapper[4632]: I1201 06:45:16.750438 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:45:16 crc kubenswrapper[4632]: E1201 06:45:16.750504 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:45:16 crc kubenswrapper[4632]: I1201 06:45:16.750528 4632 scope.go:117] "RemoveContainer" containerID="18efcb24d15fbe83fcbe0ada005e6c48ba2ae989572edc668a0df119176a2340" Dec 01 06:45:16 crc kubenswrapper[4632]: E1201 06:45:16.750561 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:45:16 crc kubenswrapper[4632]: E1201 06:45:16.750622 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:45:16 crc kubenswrapper[4632]: I1201 06:45:16.751113 4632 scope.go:117] "RemoveContainer" containerID="22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412" Dec 01 06:45:17 crc kubenswrapper[4632]: I1201 06:45:17.117159 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zpkn8_45a865b5-e289-4d8f-93d3-007d46f49be9/kube-multus/1.log" Dec 01 06:45:17 crc kubenswrapper[4632]: I1201 06:45:17.117238 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zpkn8" event={"ID":"45a865b5-e289-4d8f-93d3-007d46f49be9","Type":"ContainerStarted","Data":"cfb39e8bfd8f275cce2681160b6aed0ce7a5dad52b4c03ecd59c1dd0bc98f507"} Dec 01 06:45:17 crc kubenswrapper[4632]: I1201 06:45:17.119250 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gklnd_ac685f74-ea0b-4a05-8018-a68fc1df20cc/ovnkube-controller/3.log" Dec 01 06:45:17 crc kubenswrapper[4632]: I1201 06:45:17.122315 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" event={"ID":"ac685f74-ea0b-4a05-8018-a68fc1df20cc","Type":"ContainerStarted","Data":"aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37"} Dec 01 06:45:17 crc kubenswrapper[4632]: I1201 06:45:17.122658 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:45:17 crc kubenswrapper[4632]: I1201 06:45:17.148102 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" podStartSLOduration=107.148087923 podStartE2EDuration="1m47.148087923s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 
+0000 UTC" observedRunningTime="2025-12-01 06:45:17.147140037 +0000 UTC m=+126.712153010" watchObservedRunningTime="2025-12-01 06:45:17.148087923 +0000 UTC m=+126.713100896" Dec 01 06:45:17 crc kubenswrapper[4632]: I1201 06:45:17.347186 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-nqqbv"] Dec 01 06:45:17 crc kubenswrapper[4632]: I1201 06:45:17.347281 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:45:17 crc kubenswrapper[4632]: E1201 06:45:17.347371 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:45:18 crc kubenswrapper[4632]: I1201 06:45:18.749483 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:45:18 crc kubenswrapper[4632]: I1201 06:45:18.749525 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:45:18 crc kubenswrapper[4632]: I1201 06:45:18.749537 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:45:18 crc kubenswrapper[4632]: I1201 06:45:18.749613 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:45:18 crc kubenswrapper[4632]: E1201 06:45:18.749613 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:45:18 crc kubenswrapper[4632]: E1201 06:45:18.749702 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:45:18 crc kubenswrapper[4632]: E1201 06:45:18.749754 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:45:18 crc kubenswrapper[4632]: E1201 06:45:18.749814 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:45:20 crc kubenswrapper[4632]: I1201 06:45:20.751467 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:45:20 crc kubenswrapper[4632]: I1201 06:45:20.751540 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:45:20 crc kubenswrapper[4632]: I1201 06:45:20.751567 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:45:20 crc kubenswrapper[4632]: E1201 06:45:20.751830 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 01 06:45:20 crc kubenswrapper[4632]: E1201 06:45:20.752090 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nqqbv" podUID="559abf1e-dc19-40e9-b75b-9a327d661dc0" Dec 01 06:45:20 crc kubenswrapper[4632]: E1201 06:45:20.752244 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 01 06:45:20 crc kubenswrapper[4632]: I1201 06:45:20.753076 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:45:20 crc kubenswrapper[4632]: E1201 06:45:20.753181 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 01 06:45:22 crc kubenswrapper[4632]: I1201 06:45:22.749779 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:45:22 crc kubenswrapper[4632]: I1201 06:45:22.749806 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv" Dec 01 06:45:22 crc kubenswrapper[4632]: I1201 06:45:22.749873 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:45:22 crc kubenswrapper[4632]: I1201 06:45:22.749872 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:45:22 crc kubenswrapper[4632]: I1201 06:45:22.752406 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 01 06:45:22 crc kubenswrapper[4632]: I1201 06:45:22.752437 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 01 06:45:22 crc kubenswrapper[4632]: I1201 06:45:22.752444 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 01 06:45:22 crc kubenswrapper[4632]: I1201 06:45:22.752444 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 01 06:45:22 crc kubenswrapper[4632]: I1201 06:45:22.752663 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 01 06:45:22 crc kubenswrapper[4632]: I1201 06:45:22.753480 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.920184 4632 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.945648 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-kxph4"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.946068 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.948623 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-audit-dir\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.948666 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-node-pullsecrets\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.948688 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-etcd-client\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.948708 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-image-import-ca\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.948729 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-encryption-config\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.948758 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-etcd-serving-ca\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.948776 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42b2t\" (UniqueName: \"kubernetes.io/projected/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-kube-api-access-42b2t\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.948803 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-serving-cert\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.948823 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"audit\" (UniqueName: \"kubernetes.io/configmap/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-audit\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.948840 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.948878 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-config\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.948987 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.949502 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.950199 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-g99wc"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.951233 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-g99wc" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.951775 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-m2fgg"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.952276 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-m2fgg" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.956685 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.956762 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.956889 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.956972 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.957015 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.957126 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.957257 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.957348 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lw8pk"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.957372 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.957676 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.957708 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.957723 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.957787 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.957869 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.957912 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.957524 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.957560 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.958027 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.957604 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.957652 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.958032 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.958086 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.958445 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.958935 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-nqvnc"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.958937 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.959197 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-nqvnc" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.959989 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cn2l5"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.960343 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-wchfw"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.960347 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cn2l5" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.960727 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-wchfw" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.961606 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-d8bfn"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.961856 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-d9ddv"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.962086 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-d9ddv" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.962495 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jdgfp"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.962626 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.962732 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jdgfp" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.965292 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.966176 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.966327 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-52x8r"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.966666 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-52x8r" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.967175 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.967484 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.968682 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xpv92"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.968915 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-gkwgl"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.969217 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gkwgl" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.969404 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-ttl89"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.969807 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ttl89" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.969454 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xpv92" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.971427 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-lrlpn"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.971851 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jrfmt"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.972079 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-hf9tg"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.972435 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-hf9tg" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.972684 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-w2js2"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.972935 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.973481 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lrlpn" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.973550 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-2spdj"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.973915 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2spdj" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.974611 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jrfmt" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.976518 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-nr8wb"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.977687 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-j6rr8"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.978024 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6rr8" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.978176 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nr8wb" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.978803 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xwqwc"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.979133 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xwqwc" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.979603 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v966p"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.986874 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-wmm47"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.987421 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v966p" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.987687 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-wmm47" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.989147 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vtnjq"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.989981 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7r8sp"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.990504 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vtnjq" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.991455 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7r8sp" Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.993538 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-kmxd4"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.994172 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-58hth"] Dec 01 06:45:23 crc kubenswrapper[4632]: I1201 06:45:23.998855 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-kmxd4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.000223 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-58hth" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.000470 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.000763 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.000839 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.000904 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.000999 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.001210 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.001482 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.001519 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.001555 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.001482 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.001624 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.001652 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.001677 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.002318 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mxcl4"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.002711 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mxcl4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.002939 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6vgtg"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.003694 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6vgtg" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.004530 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.004532 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.004561 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.004564 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.004593 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.004574 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.004683 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.004711 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.004719 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.004723 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.004789 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.005040 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.005082 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.005223 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.005271 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.005305 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.005307 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.005323 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.005385 4632 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-image-registry"/"image-registry-operator-tls" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.005479 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.005539 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.006036 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.006114 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-br5vj"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.006516 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-br5vj" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.006774 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.006891 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.006905 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.006949 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.006981 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.007005 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.007024 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-kxph4"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.007051 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.007084 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.007103 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.007135 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.007187 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.007221 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.007228 4632 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.007284 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.007187 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.007293 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.007455 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.007597 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.007695 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.007705 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.007746 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.007797 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.007838 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.007805 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.007920 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.007947 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.007922 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.008025 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.008039 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.008112 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.008186 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-m2fgg"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.008192 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.008432 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.008869 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-4gjnq"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.009238 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-4gjnq" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.009462 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-789rs"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.009905 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-789rs" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.010568 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jgmv4"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.010874 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jgmv4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.013514 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409525-ndjg6"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.014607 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-g99wc"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.014629 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.014700 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-ndjg6" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.015149 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.016099 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.034101 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.036384 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.037829 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.041333 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.062702 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.065170 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.067544 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-d9ddv"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.067584 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-wchfw"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.067594 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jrfmt"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.069226 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-j4hgc"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.069251 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.069760 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-2spdj"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.069836 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-j4hgc" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.070341 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-hf9tg"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.070502 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.070868 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.071048 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-audit-dir\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.071090 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-node-pullsecrets\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.071106 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-etcd-client\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.071121 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-image-import-ca\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.071135 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-encryption-config\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.071158 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42b2t\" (UniqueName: \"kubernetes.io/projected/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-kube-api-access-42b2t\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.071185 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-node-pullsecrets\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.071189 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-etcd-serving-ca\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.071231 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-serving-cert\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.071249 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-audit\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.071264 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.071301 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-config\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.071454 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jdgfp"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.071775 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-etcd-serving-ca\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.074036 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lw8pk"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.074200 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-config\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.074600 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-audit\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.074842 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-qkxnl"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.075249 4632 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-trusted-ca-bundle\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.075329 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-qkxnl" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.075978 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.076153 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-audit-dir\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.077075 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-image-import-ca\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.078861 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-serving-cert\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.080381 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.080585 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xwqwc"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.081906 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-4gjnq"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.081976 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-d8bfn"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.082784 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-lrlpn"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.083773 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-etcd-client\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.083990 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-encryption-config\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " 
pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.084784 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-nqvnc"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.086104 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-j6rr8"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.086148 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.086774 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-w2js2"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.088612 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-58hth"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.088636 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-wmm47"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.089147 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-52x8r"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.089940 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7r8sp"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.090794 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-nr8wb"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.091591 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cn2l5"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.091689 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.092389 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xpv92"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.093678 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-qkxnl"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.094461 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-kmxd4"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.094926 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v966p"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.095761 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jgmv4"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.096512 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-gkwgl"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.097295 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-br5vj"] Dec 01 
06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.098327 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mxcl4"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.099223 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-789rs"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.100084 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vtnjq"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.101071 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-87rgh"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.102429 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409525-ndjg6"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.102580 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-87rgh" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.102661 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-87rgh"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.103471 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6vgtg"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.107381 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.128530 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.148323 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.167515 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.173938 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-jgmv4\" (UID: \"c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jgmv4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.173967 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2cfa811-e0b5-403d-b7cb-fdfc589865d4-config\") pod \"openshift-apiserver-operator-796bbdcf4f-jdgfp\" (UID: \"d2cfa811-e0b5-403d-b7cb-fdfc589865d4\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jdgfp" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.173988 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cfw8\" (UniqueName: \"kubernetes.io/projected/c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1-kube-api-access-6cfw8\") pod 
\"kube-storage-version-migrator-operator-b67b599dd-jgmv4\" (UID: \"c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jgmv4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.174007 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/af109c96-3384-4e9d-9118-876815fe9c16-signing-key\") pod \"service-ca-9c57cc56f-4gjnq\" (UID: \"af109c96-3384-4e9d-9118-876815fe9c16\") " pod="openshift-service-ca/service-ca-9c57cc56f-4gjnq" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.174071 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xxzr\" (UniqueName: \"kubernetes.io/projected/af109c96-3384-4e9d-9118-876815fe9c16-kube-api-access-6xxzr\") pod \"service-ca-9c57cc56f-4gjnq\" (UID: \"af109c96-3384-4e9d-9118-876815fe9c16\") " pod="openshift-service-ca/service-ca-9c57cc56f-4gjnq" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.174138 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d95hn\" (UniqueName: \"kubernetes.io/projected/f4318c7d-d948-4676-a3e4-885ffe2ae1ae-kube-api-access-d95hn\") pod \"dns-operator-744455d44c-hf9tg\" (UID: \"f4318c7d-d948-4676-a3e4-885ffe2ae1ae\") " pod="openshift-dns-operator/dns-operator-744455d44c-hf9tg" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.174186 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lblt\" (UniqueName: \"kubernetes.io/projected/2bcb7288-f87a-444d-b3f7-7cfa98d24a95-kube-api-access-7lblt\") pod \"migrator-59844c95c7-lrlpn\" (UID: \"2bcb7288-f87a-444d-b3f7-7cfa98d24a95\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lrlpn" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.174205 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/af109c96-3384-4e9d-9118-876815fe9c16-signing-cabundle\") pod \"service-ca-9c57cc56f-4gjnq\" (UID: \"af109c96-3384-4e9d-9118-876815fe9c16\") " pod="openshift-service-ca/service-ca-9c57cc56f-4gjnq" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.174242 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-jgmv4\" (UID: \"c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jgmv4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.174264 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/859a757f-9846-4176-8afd-b0e6b818563f-proxy-tls\") pod \"machine-config-operator-74547568cd-nr8wb\" (UID: \"859a757f-9846-4176-8afd-b0e6b818563f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nr8wb" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.174290 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p2mr9\" (UniqueName: 
\"kubernetes.io/projected/859a757f-9846-4176-8afd-b0e6b818563f-kube-api-access-p2mr9\") pod \"machine-config-operator-74547568cd-nr8wb\" (UID: \"859a757f-9846-4176-8afd-b0e6b818563f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nr8wb" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.174341 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f4318c7d-d948-4676-a3e4-885ffe2ae1ae-metrics-tls\") pod \"dns-operator-744455d44c-hf9tg\" (UID: \"f4318c7d-d948-4676-a3e4-885ffe2ae1ae\") " pod="openshift-dns-operator/dns-operator-744455d44c-hf9tg" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.174390 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/859a757f-9846-4176-8afd-b0e6b818563f-images\") pod \"machine-config-operator-74547568cd-nr8wb\" (UID: \"859a757f-9846-4176-8afd-b0e6b818563f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nr8wb" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.174406 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2cfa811-e0b5-403d-b7cb-fdfc589865d4-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-jdgfp\" (UID: \"d2cfa811-e0b5-403d-b7cb-fdfc589865d4\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jdgfp" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.174438 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/859a757f-9846-4176-8afd-b0e6b818563f-auth-proxy-config\") pod \"machine-config-operator-74547568cd-nr8wb\" (UID: \"859a757f-9846-4176-8afd-b0e6b818563f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nr8wb" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.174451 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxz6c\" (UniqueName: \"kubernetes.io/projected/d2cfa811-e0b5-403d-b7cb-fdfc589865d4-kube-api-access-qxz6c\") pod \"openshift-apiserver-operator-796bbdcf4f-jdgfp\" (UID: \"d2cfa811-e0b5-403d-b7cb-fdfc589865d4\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jdgfp" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.175138 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-5xz74"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.175653 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-5xz74" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.175986 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-jwp2r"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.176753 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-jwp2r" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.180888 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-jwp2r"] Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.187341 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.207720 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.227339 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.247438 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.267196 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.274963 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-jgmv4\" (UID: \"c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jgmv4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.275087 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/859a757f-9846-4176-8afd-b0e6b818563f-proxy-tls\") pod \"machine-config-operator-74547568cd-nr8wb\" (UID: \"859a757f-9846-4176-8afd-b0e6b818563f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nr8wb" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.275178 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p2mr9\" (UniqueName: \"kubernetes.io/projected/859a757f-9846-4176-8afd-b0e6b818563f-kube-api-access-p2mr9\") pod \"machine-config-operator-74547568cd-nr8wb\" (UID: \"859a757f-9846-4176-8afd-b0e6b818563f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nr8wb" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.275251 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f4318c7d-d948-4676-a3e4-885ffe2ae1ae-metrics-tls\") pod \"dns-operator-744455d44c-hf9tg\" (UID: \"f4318c7d-d948-4676-a3e4-885ffe2ae1ae\") " pod="openshift-dns-operator/dns-operator-744455d44c-hf9tg" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.275331 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/859a757f-9846-4176-8afd-b0e6b818563f-images\") pod \"machine-config-operator-74547568cd-nr8wb\" (UID: \"859a757f-9846-4176-8afd-b0e6b818563f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nr8wb" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.275416 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/d2cfa811-e0b5-403d-b7cb-fdfc589865d4-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-jdgfp\" (UID: \"d2cfa811-e0b5-403d-b7cb-fdfc589865d4\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jdgfp" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.275492 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/859a757f-9846-4176-8afd-b0e6b818563f-auth-proxy-config\") pod \"machine-config-operator-74547568cd-nr8wb\" (UID: \"859a757f-9846-4176-8afd-b0e6b818563f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nr8wb" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.275564 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxz6c\" (UniqueName: \"kubernetes.io/projected/d2cfa811-e0b5-403d-b7cb-fdfc589865d4-kube-api-access-qxz6c\") pod \"openshift-apiserver-operator-796bbdcf4f-jdgfp\" (UID: \"d2cfa811-e0b5-403d-b7cb-fdfc589865d4\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jdgfp" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.275652 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-jgmv4\" (UID: \"c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jgmv4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.275719 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2cfa811-e0b5-403d-b7cb-fdfc589865d4-config\") pod \"openshift-apiserver-operator-796bbdcf4f-jdgfp\" (UID: \"d2cfa811-e0b5-403d-b7cb-fdfc589865d4\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jdgfp" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.275787 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cfw8\" (UniqueName: \"kubernetes.io/projected/c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1-kube-api-access-6cfw8\") pod \"kube-storage-version-migrator-operator-b67b599dd-jgmv4\" (UID: \"c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jgmv4" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.275855 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/af109c96-3384-4e9d-9118-876815fe9c16-signing-key\") pod \"service-ca-9c57cc56f-4gjnq\" (UID: \"af109c96-3384-4e9d-9118-876815fe9c16\") " pod="openshift-service-ca/service-ca-9c57cc56f-4gjnq" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.275930 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xxzr\" (UniqueName: \"kubernetes.io/projected/af109c96-3384-4e9d-9118-876815fe9c16-kube-api-access-6xxzr\") pod \"service-ca-9c57cc56f-4gjnq\" (UID: \"af109c96-3384-4e9d-9118-876815fe9c16\") " pod="openshift-service-ca/service-ca-9c57cc56f-4gjnq" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.276012 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d95hn\" (UniqueName: 
\"kubernetes.io/projected/f4318c7d-d948-4676-a3e4-885ffe2ae1ae-kube-api-access-d95hn\") pod \"dns-operator-744455d44c-hf9tg\" (UID: \"f4318c7d-d948-4676-a3e4-885ffe2ae1ae\") " pod="openshift-dns-operator/dns-operator-744455d44c-hf9tg" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.276086 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/af109c96-3384-4e9d-9118-876815fe9c16-signing-cabundle\") pod \"service-ca-9c57cc56f-4gjnq\" (UID: \"af109c96-3384-4e9d-9118-876815fe9c16\") " pod="openshift-service-ca/service-ca-9c57cc56f-4gjnq" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.276109 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/859a757f-9846-4176-8afd-b0e6b818563f-auth-proxy-config\") pod \"machine-config-operator-74547568cd-nr8wb\" (UID: \"859a757f-9846-4176-8afd-b0e6b818563f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nr8wb" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.276149 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lblt\" (UniqueName: \"kubernetes.io/projected/2bcb7288-f87a-444d-b3f7-7cfa98d24a95-kube-api-access-7lblt\") pod \"migrator-59844c95c7-lrlpn\" (UID: \"2bcb7288-f87a-444d-b3f7-7cfa98d24a95\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lrlpn" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.276204 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2cfa811-e0b5-403d-b7cb-fdfc589865d4-config\") pod \"openshift-apiserver-operator-796bbdcf4f-jdgfp\" (UID: \"d2cfa811-e0b5-403d-b7cb-fdfc589865d4\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jdgfp" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.277529 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f4318c7d-d948-4676-a3e4-885ffe2ae1ae-metrics-tls\") pod \"dns-operator-744455d44c-hf9tg\" (UID: \"f4318c7d-d948-4676-a3e4-885ffe2ae1ae\") " pod="openshift-dns-operator/dns-operator-744455d44c-hf9tg" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.277987 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2cfa811-e0b5-403d-b7cb-fdfc589865d4-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-jdgfp\" (UID: \"d2cfa811-e0b5-403d-b7cb-fdfc589865d4\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jdgfp" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.287224 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.307421 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.328244 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.347473 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.372197 4632 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.387396 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.407773 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.428091 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.437526 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/859a757f-9846-4176-8afd-b0e6b818563f-proxy-tls\") pod \"machine-config-operator-74547568cd-nr8wb\" (UID: \"859a757f-9846-4176-8afd-b0e6b818563f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nr8wb" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.447499 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.455974 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/859a757f-9846-4176-8afd-b0e6b818563f-images\") pod \"machine-config-operator-74547568cd-nr8wb\" (UID: \"859a757f-9846-4176-8afd-b0e6b818563f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nr8wb" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.467749 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.487924 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.507904 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.527328 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.548240 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.568196 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.587784 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.607919 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.627487 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.647591 4632 reflector.go:368] Caches populated 
for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.667216 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.687900 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.707156 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.727688 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.748440 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.767343 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.787850 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.807491 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.828021 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.847907 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.867949 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.887393 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.907611 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.927969 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.948048 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.967489 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 01 06:45:24 crc kubenswrapper[4632]: I1201 06:45:24.987499 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.007042 4632 request.go:700] Waited for 1.003071095s due to client-side 
throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/secrets?fieldSelector=metadata.name%3Dmarketplace-operator-dockercfg-5nsgg&limit=500&resourceVersion=0 Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.007784 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.027385 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.052483 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.067882 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.087384 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.107753 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.130979 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.149718 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.168012 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.187569 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.207782 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.220972 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/af109c96-3384-4e9d-9118-876815fe9c16-signing-key\") pod \"service-ca-9c57cc56f-4gjnq\" (UID: \"af109c96-3384-4e9d-9118-876815fe9c16\") " pod="openshift-service-ca/service-ca-9c57cc56f-4gjnq" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.228786 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.237498 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/af109c96-3384-4e9d-9118-876815fe9c16-signing-cabundle\") pod \"service-ca-9c57cc56f-4gjnq\" (UID: \"af109c96-3384-4e9d-9118-876815fe9c16\") " pod="openshift-service-ca/service-ca-9c57cc56f-4gjnq" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.248290 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.268188 4632 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 01 06:45:25 crc kubenswrapper[4632]: E1201 06:45:25.276127 4632 secret.go:188] Couldn't get secret openshift-kube-storage-version-migrator-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 01 06:45:25 crc kubenswrapper[4632]: E1201 06:45:25.276193 4632 configmap.go:193] Couldn't get configMap openshift-kube-storage-version-migrator-operator/config: failed to sync configmap cache: timed out waiting for the condition Dec 01 06:45:25 crc kubenswrapper[4632]: E1201 06:45:25.276349 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1-serving-cert podName:c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1 nodeName:}" failed. No retries permitted until 2025-12-01 06:45:25.776310754 +0000 UTC m=+135.341323727 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1-serving-cert") pod "kube-storage-version-migrator-operator-b67b599dd-jgmv4" (UID: "c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1") : failed to sync secret cache: timed out waiting for the condition Dec 01 06:45:25 crc kubenswrapper[4632]: E1201 06:45:25.276445 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1-config podName:c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1 nodeName:}" failed. No retries permitted until 2025-12-01 06:45:25.776434438 +0000 UTC m=+135.341447411 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1-config") pod "kube-storage-version-migrator-operator-b67b599dd-jgmv4" (UID: "c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1") : failed to sync configmap cache: timed out waiting for the condition Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.287696 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.308071 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.327829 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.348341 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.367983 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.387402 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.407843 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.428141 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.447449 4632 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.467287 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.487402 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.507470 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.527855 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.547851 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.588566 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.607824 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.627392 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.647440 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.667710 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.687597 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.708532 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.747272 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.768104 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.787534 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.789568 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-jgmv4\" (UID: \"c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jgmv4" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.789636 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-jgmv4\" (UID: \"c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jgmv4" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.790233 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-jgmv4\" (UID: \"c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jgmv4" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.791884 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-jgmv4\" (UID: \"c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jgmv4" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.807927 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.839179 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42b2t\" (UniqueName: \"kubernetes.io/projected/336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f-kube-api-access-42b2t\") pod \"apiserver-76f77b778f-kxph4\" (UID: \"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f\") " pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.848178 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.868203 4632 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.888050 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.907207 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.927554 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.948013 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.967230 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 01 06:45:25 crc kubenswrapper[4632]: I1201 06:45:25.988283 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.007208 4632 request.go:700] Waited for 1.830227967s due to client-side throttling, not priority and fairness, request: 
GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-dns/configmaps?fieldSelector=metadata.name%3Ddns-default&limit=500&resourceVersion=0 Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.008189 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.039527 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p2mr9\" (UniqueName: \"kubernetes.io/projected/859a757f-9846-4176-8afd-b0e6b818563f-kube-api-access-p2mr9\") pod \"machine-config-operator-74547568cd-nr8wb\" (UID: \"859a757f-9846-4176-8afd-b0e6b818563f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nr8wb" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.065160 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxz6c\" (UniqueName: \"kubernetes.io/projected/d2cfa811-e0b5-403d-b7cb-fdfc589865d4-kube-api-access-qxz6c\") pod \"openshift-apiserver-operator-796bbdcf4f-jdgfp\" (UID: \"d2cfa811-e0b5-403d-b7cb-fdfc589865d4\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jdgfp" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.068045 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.079129 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cfw8\" (UniqueName: \"kubernetes.io/projected/c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1-kube-api-access-6cfw8\") pod \"kube-storage-version-migrator-operator-b67b599dd-jgmv4\" (UID: \"c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jgmv4" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.098570 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xxzr\" (UniqueName: \"kubernetes.io/projected/af109c96-3384-4e9d-9118-876815fe9c16-kube-api-access-6xxzr\") pod \"service-ca-9c57cc56f-4gjnq\" (UID: \"af109c96-3384-4e9d-9118-876815fe9c16\") " pod="openshift-service-ca/service-ca-9c57cc56f-4gjnq" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.122268 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d95hn\" (UniqueName: \"kubernetes.io/projected/f4318c7d-d948-4676-a3e4-885ffe2ae1ae-kube-api-access-d95hn\") pod \"dns-operator-744455d44c-hf9tg\" (UID: \"f4318c7d-d948-4676-a3e4-885ffe2ae1ae\") " pod="openshift-dns-operator/dns-operator-744455d44c-hf9tg" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.139509 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lblt\" (UniqueName: \"kubernetes.io/projected/2bcb7288-f87a-444d-b3f7-7cfa98d24a95-kube-api-access-7lblt\") pod \"migrator-59844c95c7-lrlpn\" (UID: \"2bcb7288-f87a-444d-b3f7-7cfa98d24a95\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lrlpn" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.192397 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/e354731f-7ed1-47e3-8d64-7d55f1613100-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-mxcl4\" (UID: \"e354731f-7ed1-47e3-8d64-7d55f1613100\") " 
pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mxcl4" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.192429 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93b6b658-7ff9-48ad-bd7b-6518ff4cdd5c-config\") pod \"machine-approver-56656f9798-ttl89\" (UID: \"93b6b658-7ff9-48ad-bd7b-6518ff4cdd5c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ttl89" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.192452 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c439cd75-04a3-4881-8878-44f0a91f2088-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-v966p\" (UID: \"c439cd75-04a3-4881-8878-44f0a91f2088\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v966p" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.192467 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3-etcd-service-ca\") pod \"etcd-operator-b45778765-789rs\" (UID: \"5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-789rs" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.192482 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-serving-cert\") pod \"controller-manager-879f6c89f-d8bfn\" (UID: \"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.192496 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25r9n\" (UniqueName: \"kubernetes.io/projected/ae14b3fb-8e47-4b2c-af2a-689240f1dae9-kube-api-access-25r9n\") pod \"cluster-samples-operator-665b6dd947-m2fgg\" (UID: \"ae14b3fb-8e47-4b2c-af2a-689240f1dae9\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-m2fgg" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.192540 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c7cadd7-3773-48bd-a595-631a38d4d693-config\") pod \"kube-controller-manager-operator-78b949d7b-jrfmt\" (UID: \"7c7cadd7-3773-48bd-a595-631a38d4d693\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jrfmt" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.192599 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/978db707-ffa7-491e-b032-59db1b5aa9c9-profile-collector-cert\") pod \"catalog-operator-68c6474976-7r8sp\" (UID: \"978db707-ffa7-491e-b032-59db1b5aa9c9\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7r8sp" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.192648 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.192716 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/887e4a51-f7e3-4e2d-aa8e-10cd0793898b-serving-cert\") pod \"authentication-operator-69f744f599-nqvnc\" (UID: \"887e4a51-f7e3-4e2d-aa8e-10cd0793898b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nqvnc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.192759 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/192cea7d-fce6-4d2f-8fc7-9b0ed2be15fe-config\") pod \"console-operator-58897d9998-d9ddv\" (UID: \"192cea7d-fce6-4d2f-8fc7-9b0ed2be15fe\") " pod="openshift-console-operator/console-operator-58897d9998-d9ddv" Dec 01 06:45:26 crc kubenswrapper[4632]: E1201 06:45:26.192968 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:26.692956988 +0000 UTC m=+136.257969961 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193186 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qwhw\" (UniqueName: \"kubernetes.io/projected/6be1d7b9-4a80-4b57-a4ac-71cf05c921f9-kube-api-access-7qwhw\") pod \"route-controller-manager-6576b87f9c-dxpwr\" (UID: \"6be1d7b9-4a80-4b57-a4ac-71cf05c921f9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193265 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-ca-trust-extracted\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193286 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/39126103-ae29-4563-af2f-cd549bb98ebb-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-cn2l5\" (UID: \"39126103-ae29-4563-af2f-cd549bb98ebb\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cn2l5" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193335 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/887e4a51-f7e3-4e2d-aa8e-10cd0793898b-config\") pod \"authentication-operator-69f744f599-nqvnc\" (UID: 
\"887e4a51-f7e3-4e2d-aa8e-10cd0793898b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nqvnc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193347 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-kxph4"] Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193391 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bk2sv\" (UniqueName: \"kubernetes.io/projected/3c63f605-5c81-44a5-b1f5-448b6f87c7a4-kube-api-access-bk2sv\") pod \"marketplace-operator-79b997595-6vgtg\" (UID: \"3c63f605-5c81-44a5-b1f5-448b6f87c7a4\") " pod="openshift-marketplace/marketplace-operator-79b997595-6vgtg" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193408 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/8a77399a-5697-4f2d-8ec5-8e05106a356d-available-featuregates\") pod \"openshift-config-operator-7777fb866f-gkwgl\" (UID: \"8a77399a-5697-4f2d-8ec5-8e05106a356d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gkwgl" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193448 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193485 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q89k4\" (UniqueName: \"kubernetes.io/projected/dab5433a-456e-4006-a05d-a2f04ebe1330-kube-api-access-q89k4\") pod \"collect-profiles-29409525-ndjg6\" (UID: \"dab5433a-456e-4006-a05d-a2f04ebe1330\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-ndjg6" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193501 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193521 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6jwp\" (UniqueName: \"kubernetes.io/projected/e90e3510-a941-487a-af96-639fdc977fbb-kube-api-access-c6jwp\") pod \"machine-api-operator-5694c8668f-g99wc\" (UID: \"e90e3510-a941-487a-af96-639fdc977fbb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-g99wc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193536 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b8a033c5-0e8d-4f09-8bb8-5768d1a370a4-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-xpv92\" (UID: \"b8a033c5-0e8d-4f09-8bb8-5768d1a370a4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xpv92" Dec 01 06:45:26 crc 
kubenswrapper[4632]: I1201 06:45:26.193579 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-client-ca\") pod \"controller-manager-879f6c89f-d8bfn\" (UID: \"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193594 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7fa3078d-93ff-452f-a746-010c568af171-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-lp6wn\" (UID: \"7fa3078d-93ff-452f-a746-010c568af171\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193606 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/978db707-ffa7-491e-b032-59db1b5aa9c9-srv-cert\") pod \"catalog-operator-68c6474976-7r8sp\" (UID: \"978db707-ffa7-491e-b032-59db1b5aa9c9\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7r8sp" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193625 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-trusted-ca\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193638 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-d8bfn\" (UID: \"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193680 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a77399a-5697-4f2d-8ec5-8e05106a356d-serving-cert\") pod \"openshift-config-operator-7777fb866f-gkwgl\" (UID: \"8a77399a-5697-4f2d-8ec5-8e05106a356d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gkwgl" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193696 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72vqw\" (UniqueName: \"kubernetes.io/projected/8906a9ad-be53-4475-8b99-c2895ff794fa-kube-api-access-72vqw\") pod \"router-default-5444994796-j4hgc\" (UID: \"8906a9ad-be53-4475-8b99-c2895ff794fa\") " pod="openshift-ingress/router-default-5444994796-j4hgc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193710 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzfzr\" (UniqueName: \"kubernetes.io/projected/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-kube-api-access-xzfzr\") pod \"console-f9d7485db-52x8r\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " pod="openshift-console/console-f9d7485db-52x8r" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193739 4632 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sw6x9\" (UniqueName: \"kubernetes.io/projected/978db707-ffa7-491e-b032-59db1b5aa9c9-kube-api-access-sw6x9\") pod \"catalog-operator-68c6474976-7r8sp\" (UID: \"978db707-ffa7-491e-b032-59db1b5aa9c9\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7r8sp" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193759 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7fa3078d-93ff-452f-a746-010c568af171-audit-dir\") pod \"apiserver-7bbb656c7d-lp6wn\" (UID: \"7fa3078d-93ff-452f-a746-010c568af171\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193771 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3-etcd-ca\") pod \"etcd-operator-b45778765-789rs\" (UID: \"5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-789rs" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193785 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xqz7\" (UniqueName: \"kubernetes.io/projected/0baef212-8f33-4c4f-b5e2-af5cad0333d5-kube-api-access-6xqz7\") pod \"olm-operator-6b444d44fb-xwqwc\" (UID: \"0baef212-8f33-4c4f-b5e2-af5cad0333d5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xwqwc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193832 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-config\") pod \"controller-manager-879f6c89f-d8bfn\" (UID: \"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193857 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-trusted-ca-bundle\") pod \"console-f9d7485db-52x8r\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " pod="openshift-console/console-f9d7485db-52x8r" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193876 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6be1d7b9-4a80-4b57-a4ac-71cf05c921f9-serving-cert\") pod \"route-controller-manager-6576b87f9c-dxpwr\" (UID: \"6be1d7b9-4a80-4b57-a4ac-71cf05c921f9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193894 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-console-serving-cert\") pod \"console-f9d7485db-52x8r\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " pod="openshift-console/console-f9d7485db-52x8r" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193912 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/8906a9ad-be53-4475-8b99-c2895ff794fa-service-ca-bundle\") pod \"router-default-5444994796-j4hgc\" (UID: \"8906a9ad-be53-4475-8b99-c2895ff794fa\") " pod="openshift-ingress/router-default-5444994796-j4hgc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193929 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shwnk\" (UniqueName: \"kubernetes.io/projected/6d196250-dda3-49c7-8f2d-4e4888010659-kube-api-access-shwnk\") pod \"package-server-manager-789f6589d5-vtnjq\" (UID: \"6d196250-dda3-49c7-8f2d-4e4888010659\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vtnjq" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193944 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6be1d7b9-4a80-4b57-a4ac-71cf05c921f9-config\") pod \"route-controller-manager-6576b87f9c-dxpwr\" (UID: \"6be1d7b9-4a80-4b57-a4ac-71cf05c921f9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.193980 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3-etcd-client\") pod \"etcd-operator-b45778765-789rs\" (UID: \"5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-789rs" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194013 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4a7f54b-f039-4564-a917-b643351932f5-config\") pod \"kube-apiserver-operator-766d6c64bb-br5vj\" (UID: \"c4a7f54b-f039-4564-a917-b643351932f5\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-br5vj" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194031 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/192cea7d-fce6-4d2f-8fc7-9b0ed2be15fe-trusted-ca\") pod \"console-operator-58897d9998-d9ddv\" (UID: \"192cea7d-fce6-4d2f-8fc7-9b0ed2be15fe\") " pod="openshift-console-operator/console-operator-58897d9998-d9ddv" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194047 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7c7cadd7-3773-48bd-a595-631a38d4d693-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-jrfmt\" (UID: \"7c7cadd7-3773-48bd-a595-631a38d4d693\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jrfmt" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194060 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7fa3078d-93ff-452f-a746-010c568af171-encryption-config\") pod \"apiserver-7bbb656c7d-lp6wn\" (UID: \"7fa3078d-93ff-452f-a746-010c568af171\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194089 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b695h\" (UniqueName: 
\"kubernetes.io/projected/1114abcc-5526-4ef9-8e98-8fe888a77072-kube-api-access-b695h\") pod \"multus-admission-controller-857f4d67dd-wmm47\" (UID: \"1114abcc-5526-4ef9-8e98-8fe888a77072\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-wmm47" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194104 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-registry-certificates\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194119 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/cb6f81ed-0172-4f32-97cb-b2911644523a-metrics-tls\") pod \"ingress-operator-5b745b69d9-j6rr8\" (UID: \"cb6f81ed-0172-4f32-97cb-b2911644523a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6rr8" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194131 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-console-config\") pod \"console-f9d7485db-52x8r\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " pod="openshift-console/console-f9d7485db-52x8r" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194142 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-service-ca\") pod \"console-f9d7485db-52x8r\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " pod="openshift-console/console-f9d7485db-52x8r" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194154 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0baef212-8f33-4c4f-b5e2-af5cad0333d5-srv-cert\") pod \"olm-operator-6b444d44fb-xwqwc\" (UID: \"0baef212-8f33-4c4f-b5e2-af5cad0333d5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xwqwc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194168 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/39126103-ae29-4563-af2f-cd549bb98ebb-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-cn2l5\" (UID: \"39126103-ae29-4563-af2f-cd549bb98ebb\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cn2l5" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194182 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjjsv\" (UniqueName: \"kubernetes.io/projected/cb6f81ed-0172-4f32-97cb-b2911644523a-kube-api-access-cjjsv\") pod \"ingress-operator-5b745b69d9-j6rr8\" (UID: \"cb6f81ed-0172-4f32-97cb-b2911644523a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6rr8" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194194 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6be1d7b9-4a80-4b57-a4ac-71cf05c921f9-client-ca\") pod 
\"route-controller-manager-6576b87f9c-dxpwr\" (UID: \"6be1d7b9-4a80-4b57-a4ac-71cf05c921f9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194231 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgs24\" (UniqueName: \"kubernetes.io/projected/b8a033c5-0e8d-4f09-8bb8-5768d1a370a4-kube-api-access-rgs24\") pod \"openshift-controller-manager-operator-756b6f6bc6-xpv92\" (UID: \"b8a033c5-0e8d-4f09-8bb8-5768d1a370a4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xpv92" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194286 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7fa3078d-93ff-452f-a746-010c568af171-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-lp6wn\" (UID: \"7fa3078d-93ff-452f-a746-010c568af171\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194305 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/8906a9ad-be53-4475-8b99-c2895ff794fa-stats-auth\") pod \"router-default-5444994796-j4hgc\" (UID: \"8906a9ad-be53-4475-8b99-c2895ff794fa\") " pod="openshift-ingress/router-default-5444994796-j4hgc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194368 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5q72b\" (UniqueName: \"kubernetes.io/projected/887e4a51-f7e3-4e2d-aa8e-10cd0793898b-kube-api-access-5q72b\") pod \"authentication-operator-69f744f599-nqvnc\" (UID: \"887e4a51-f7e3-4e2d-aa8e-10cd0793898b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nqvnc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194396 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3c63f605-5c81-44a5-b1f5-448b6f87c7a4-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6vgtg\" (UID: \"3c63f605-5c81-44a5-b1f5-448b6f87c7a4\") " pod="openshift-marketplace/marketplace-operator-79b997595-6vgtg" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194418 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/93c3de01-c9ea-4c65-8d6d-5cce60e89f5d-apiservice-cert\") pod \"packageserver-d55dfcdfc-58hth\" (UID: \"93c3de01-c9ea-4c65-8d6d-5cce60e89f5d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-58hth" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194432 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/93b6b658-7ff9-48ad-bd7b-6518ff4cdd5c-machine-approver-tls\") pod \"machine-approver-56656f9798-ttl89\" (UID: \"93b6b658-7ff9-48ad-bd7b-6518ff4cdd5c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ttl89" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194446 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/dab5433a-456e-4006-a05d-a2f04ebe1330-config-volume\") pod \"collect-profiles-29409525-ndjg6\" (UID: \"dab5433a-456e-4006-a05d-a2f04ebe1330\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-ndjg6" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194460 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c7cadd7-3773-48bd-a595-631a38d4d693-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-jrfmt\" (UID: \"7c7cadd7-3773-48bd-a595-631a38d4d693\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jrfmt" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194473 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/39126103-ae29-4563-af2f-cd549bb98ebb-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-cn2l5\" (UID: \"39126103-ae29-4563-af2f-cd549bb98ebb\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cn2l5" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194489 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7a1eaf03-bfc0-4185-ad22-55e6ef75dae5-serving-cert\") pod \"service-ca-operator-777779d784-kmxd4\" (UID: \"7a1eaf03-bfc0-4185-ad22-55e6ef75dae5\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-kmxd4" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194509 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194530 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fn9cs\" (UniqueName: \"kubernetes.io/projected/5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3-kube-api-access-fn9cs\") pod \"etcd-operator-b45778765-789rs\" (UID: \"5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-789rs" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194557 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194592 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/e90e3510-a941-487a-af96-639fdc977fbb-images\") pod \"machine-api-operator-5694c8668f-g99wc\" (UID: \"e90e3510-a941-487a-af96-639fdc977fbb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-g99wc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194606 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7fa3078d-93ff-452f-a746-010c568af171-audit-policies\") pod \"apiserver-7bbb656c7d-lp6wn\" (UID: \"7fa3078d-93ff-452f-a746-010c568af171\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194629 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194658 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/887e4a51-f7e3-4e2d-aa8e-10cd0793898b-service-ca-bundle\") pod \"authentication-operator-69f744f599-nqvnc\" (UID: \"887e4a51-f7e3-4e2d-aa8e-10cd0793898b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nqvnc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194672 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/e90e3510-a941-487a-af96-639fdc977fbb-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-g99wc\" (UID: \"e90e3510-a941-487a-af96-639fdc977fbb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-g99wc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194687 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194701 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-console-oauth-config\") pod \"console-f9d7485db-52x8r\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " pod="openshift-console/console-f9d7485db-52x8r" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194742 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dab5433a-456e-4006-a05d-a2f04ebe1330-secret-volume\") pod \"collect-profiles-29409525-ndjg6\" (UID: \"dab5433a-456e-4006-a05d-a2f04ebe1330\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-ndjg6" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194757 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dq5gf\" (UniqueName: \"kubernetes.io/projected/7a1eaf03-bfc0-4185-ad22-55e6ef75dae5-kube-api-access-dq5gf\") pod \"service-ca-operator-777779d784-kmxd4\" (UID: \"7a1eaf03-bfc0-4185-ad22-55e6ef75dae5\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-kmxd4" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194787 4632 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e90e3510-a941-487a-af96-639fdc977fbb-config\") pod \"machine-api-operator-5694c8668f-g99wc\" (UID: \"e90e3510-a941-487a-af96-639fdc977fbb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-g99wc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194819 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c4a7f54b-f039-4564-a917-b643351932f5-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-br5vj\" (UID: \"c4a7f54b-f039-4564-a917-b643351932f5\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-br5vj" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194835 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8a033c5-0e8d-4f09-8bb8-5768d1a370a4-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-xpv92\" (UID: \"b8a033c5-0e8d-4f09-8bb8-5768d1a370a4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xpv92" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194848 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/93c3de01-c9ea-4c65-8d6d-5cce60e89f5d-tmpfs\") pod \"packageserver-d55dfcdfc-58hth\" (UID: \"93c3de01-c9ea-4c65-8d6d-5cce60e89f5d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-58hth" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194861 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-registry-tls\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194873 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c439cd75-04a3-4881-8878-44f0a91f2088-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-v966p\" (UID: \"c439cd75-04a3-4881-8878-44f0a91f2088\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v966p" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194925 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0baef212-8f33-4c4f-b5e2-af5cad0333d5-profile-collector-cert\") pod \"olm-operator-6b444d44fb-xwqwc\" (UID: \"0baef212-8f33-4c4f-b5e2-af5cad0333d5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xwqwc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194940 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-bound-sa-token\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194953 4632 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rh9d7\" (UniqueName: \"kubernetes.io/projected/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-kube-api-access-rh9d7\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194967 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3c63f605-5c81-44a5-b1f5-448b6f87c7a4-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6vgtg\" (UID: \"3c63f605-5c81-44a5-b1f5-448b6f87c7a4\") " pod="openshift-marketplace/marketplace-operator-79b997595-6vgtg" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.194996 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3-config\") pod \"etcd-operator-b45778765-789rs\" (UID: \"5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-789rs" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195010 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7g4lz\" (UniqueName: \"kubernetes.io/projected/8a77399a-5697-4f2d-8ec5-8e05106a356d-kube-api-access-7g4lz\") pod \"openshift-config-operator-7777fb866f-gkwgl\" (UID: \"8a77399a-5697-4f2d-8ec5-8e05106a356d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gkwgl" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195024 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195038 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cb6f81ed-0172-4f32-97cb-b2911644523a-trusted-ca\") pod \"ingress-operator-5b745b69d9-j6rr8\" (UID: \"cb6f81ed-0172-4f32-97cb-b2911644523a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6rr8" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195080 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7fa3078d-93ff-452f-a746-010c568af171-etcd-client\") pod \"apiserver-7bbb656c7d-lp6wn\" (UID: \"7fa3078d-93ff-452f-a746-010c568af171\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195100 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8906a9ad-be53-4475-8b99-c2895ff794fa-metrics-certs\") pod \"router-default-5444994796-j4hgc\" (UID: \"8906a9ad-be53-4475-8b99-c2895ff794fa\") " pod="openshift-ingress/router-default-5444994796-j4hgc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195115 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195144 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1114abcc-5526-4ef9-8e98-8fe888a77072-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-wmm47\" (UID: \"1114abcc-5526-4ef9-8e98-8fe888a77072\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-wmm47" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195159 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvjvx\" (UniqueName: \"kubernetes.io/projected/93b6b658-7ff9-48ad-bd7b-6518ff4cdd5c-kube-api-access-vvjvx\") pod \"machine-approver-56656f9798-ttl89\" (UID: \"93b6b658-7ff9-48ad-bd7b-6518ff4cdd5c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ttl89" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195180 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7fa3078d-93ff-452f-a746-010c568af171-serving-cert\") pod \"apiserver-7bbb656c7d-lp6wn\" (UID: \"7fa3078d-93ff-452f-a746-010c568af171\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195192 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c439cd75-04a3-4881-8878-44f0a91f2088-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-v966p\" (UID: \"c439cd75-04a3-4881-8878-44f0a91f2088\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v966p" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195221 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/ae14b3fb-8e47-4b2c-af2a-689240f1dae9-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-m2fgg\" (UID: \"ae14b3fb-8e47-4b2c-af2a-689240f1dae9\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-m2fgg" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195237 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/192cea7d-fce6-4d2f-8fc7-9b0ed2be15fe-serving-cert\") pod \"console-operator-58897d9998-d9ddv\" (UID: \"192cea7d-fce6-4d2f-8fc7-9b0ed2be15fe\") " pod="openshift-console-operator/console-operator-58897d9998-d9ddv" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195252 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3681c29d-32b7-4037-bd1c-18c2733173bc-audit-policies\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195269 4632 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cb6f81ed-0172-4f32-97cb-b2911644523a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-j6rr8\" (UID: \"cb6f81ed-0172-4f32-97cb-b2911644523a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6rr8" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195401 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4bdc3484-8840-4d88-83c9-32ba1da693fa-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-2spdj\" (UID: \"4bdc3484-8840-4d88-83c9-32ba1da693fa\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2spdj" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195424 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195440 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kb846\" (UniqueName: \"kubernetes.io/projected/3681c29d-32b7-4037-bd1c-18c2733173bc-kube-api-access-kb846\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195487 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3-serving-cert\") pod \"etcd-operator-b45778765-789rs\" (UID: \"5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-789rs" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195502 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlqcg\" (UniqueName: \"kubernetes.io/projected/39126103-ae29-4563-af2f-cd549bb98ebb-kube-api-access-rlqcg\") pod \"cluster-image-registry-operator-dc59b4c8b-cn2l5\" (UID: \"39126103-ae29-4563-af2f-cd549bb98ebb\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cn2l5" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195584 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgnf4\" (UniqueName: \"kubernetes.io/projected/e354731f-7ed1-47e3-8d64-7d55f1613100-kube-api-access-hgnf4\") pod \"control-plane-machine-set-operator-78cbb6b69f-mxcl4\" (UID: \"e354731f-7ed1-47e3-8d64-7d55f1613100\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mxcl4" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195628 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/93b6b658-7ff9-48ad-bd7b-6518ff4cdd5c-auth-proxy-config\") pod \"machine-approver-56656f9798-ttl89\" (UID: \"93b6b658-7ff9-48ad-bd7b-6518ff4cdd5c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ttl89" 
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195710 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ld2jx\" (UniqueName: \"kubernetes.io/projected/7fa3078d-93ff-452f-a746-010c568af171-kube-api-access-ld2jx\") pod \"apiserver-7bbb656c7d-lp6wn\" (UID: \"7fa3078d-93ff-452f-a746-010c568af171\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195755 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvqlf\" (UniqueName: \"kubernetes.io/projected/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-kube-api-access-cvqlf\") pod \"controller-manager-879f6c89f-d8bfn\" (UID: \"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195804 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a1eaf03-bfc0-4185-ad22-55e6ef75dae5-config\") pod \"service-ca-operator-777779d784-kmxd4\" (UID: \"7a1eaf03-bfc0-4185-ad22-55e6ef75dae5\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-kmxd4"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195832 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4bdc3484-8840-4d88-83c9-32ba1da693fa-proxy-tls\") pod \"machine-config-controller-84d6567774-2spdj\" (UID: \"4bdc3484-8840-4d88-83c9-32ba1da693fa\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2spdj"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195867 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-installation-pull-secrets\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195902 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/93c3de01-c9ea-4c65-8d6d-5cce60e89f5d-webhook-cert\") pod \"packageserver-d55dfcdfc-58hth\" (UID: \"93c3de01-c9ea-4c65-8d6d-5cce60e89f5d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-58hth"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195931 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4a7f54b-f039-4564-a917-b643351932f5-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-br5vj\" (UID: \"c4a7f54b-f039-4564-a917-b643351932f5\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-br5vj"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195954 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/6d196250-dda3-49c7-8f2d-4e4888010659-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-vtnjq\" (UID: \"6d196250-dda3-49c7-8f2d-4e4888010659\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vtnjq"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.195992 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3681c29d-32b7-4037-bd1c-18c2733173bc-audit-dir\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.196008 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/887e4a51-f7e3-4e2d-aa8e-10cd0793898b-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-nqvnc\" (UID: \"887e4a51-f7e3-4e2d-aa8e-10cd0793898b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nqvnc"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.196029 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/8906a9ad-be53-4475-8b99-c2895ff794fa-default-certificate\") pod \"router-default-5444994796-j4hgc\" (UID: \"8906a9ad-be53-4475-8b99-c2895ff794fa\") " pod="openshift-ingress/router-default-5444994796-j4hgc"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.196046 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8shzv\" (UniqueName: \"kubernetes.io/projected/2181e6e0-5901-48a6-b44f-621e41a161d5-kube-api-access-8shzv\") pod \"downloads-7954f5f757-wchfw\" (UID: \"2181e6e0-5901-48a6-b44f-621e41a161d5\") " pod="openshift-console/downloads-7954f5f757-wchfw"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.196087 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvb4h\" (UniqueName: \"kubernetes.io/projected/93c3de01-c9ea-4c65-8d6d-5cce60e89f5d-kube-api-access-zvb4h\") pod \"packageserver-d55dfcdfc-58hth\" (UID: \"93c3de01-c9ea-4c65-8d6d-5cce60e89f5d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-58hth"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.196116 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grk9k\" (UniqueName: \"kubernetes.io/projected/192cea7d-fce6-4d2f-8fc7-9b0ed2be15fe-kube-api-access-grk9k\") pod \"console-operator-58897d9998-d9ddv\" (UID: \"192cea7d-fce6-4d2f-8fc7-9b0ed2be15fe\") " pod="openshift-console-operator/console-operator-58897d9998-d9ddv"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.196138 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjqr8\" (UniqueName: \"kubernetes.io/projected/4bdc3484-8840-4d88-83c9-32ba1da693fa-kube-api-access-gjqr8\") pod \"machine-config-controller-84d6567774-2spdj\" (UID: \"4bdc3484-8840-4d88-83c9-32ba1da693fa\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2spdj"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.196161 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-oauth-serving-cert\") pod \"console-f9d7485db-52x8r\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " pod="openshift-console/console-f9d7485db-52x8r"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.196196 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.196218 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk"
Dec 01 06:45:26 crc kubenswrapper[4632]: W1201 06:45:26.198830 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod336ac52f_ee34_4bab_a0c6_d8c1d40d6e2f.slice/crio-3c5a7bdd72dee2ecf8cae57d6f870dd860ab57523c1dff5392fb5af39bcb22d1 WatchSource:0}: Error finding container 3c5a7bdd72dee2ecf8cae57d6f870dd860ab57523c1dff5392fb5af39bcb22d1: Status 404 returned error can't find the container with id 3c5a7bdd72dee2ecf8cae57d6f870dd860ab57523c1dff5392fb5af39bcb22d1
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.223230 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jdgfp"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.268796 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nr8wb"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.293007 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-hf9tg"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.296991 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297155 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6jwp\" (UniqueName: \"kubernetes.io/projected/e90e3510-a941-487a-af96-639fdc977fbb-kube-api-access-c6jwp\") pod \"machine-api-operator-5694c8668f-g99wc\" (UID: \"e90e3510-a941-487a-af96-639fdc977fbb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-g99wc"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297178 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-client-ca\") pod \"controller-manager-879f6c89f-d8bfn\" (UID: \"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297194 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b8a033c5-0e8d-4f09-8bb8-5768d1a370a4-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-xpv92\" (UID: \"b8a033c5-0e8d-4f09-8bb8-5768d1a370a4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xpv92"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297212 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7fa3078d-93ff-452f-a746-010c568af171-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-lp6wn\" (UID: \"7fa3078d-93ff-452f-a746-010c568af171\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297225 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/978db707-ffa7-491e-b032-59db1b5aa9c9-srv-cert\") pod \"catalog-operator-68c6474976-7r8sp\" (UID: \"978db707-ffa7-491e-b032-59db1b5aa9c9\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7r8sp"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297239 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-trusted-ca\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297255 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-d8bfn\" (UID: \"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297269 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a77399a-5697-4f2d-8ec5-8e05106a356d-serving-cert\") pod \"openshift-config-operator-7777fb866f-gkwgl\" (UID: \"8a77399a-5697-4f2d-8ec5-8e05106a356d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gkwgl"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297285 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sw6x9\" (UniqueName: \"kubernetes.io/projected/978db707-ffa7-491e-b032-59db1b5aa9c9-kube-api-access-sw6x9\") pod \"catalog-operator-68c6474976-7r8sp\" (UID: \"978db707-ffa7-491e-b032-59db1b5aa9c9\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7r8sp"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297299 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72vqw\" (UniqueName: \"kubernetes.io/projected/8906a9ad-be53-4475-8b99-c2895ff794fa-kube-api-access-72vqw\") pod \"router-default-5444994796-j4hgc\" (UID: \"8906a9ad-be53-4475-8b99-c2895ff794fa\") " pod="openshift-ingress/router-default-5444994796-j4hgc"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297312 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzfzr\" (UniqueName: \"kubernetes.io/projected/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-kube-api-access-xzfzr\") pod \"console-f9d7485db-52x8r\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " pod="openshift-console/console-f9d7485db-52x8r"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297325 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7fa3078d-93ff-452f-a746-010c568af171-audit-dir\") pod \"apiserver-7bbb656c7d-lp6wn\" (UID: \"7fa3078d-93ff-452f-a746-010c568af171\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297341 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3-etcd-ca\") pod \"etcd-operator-b45778765-789rs\" (UID: \"5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-789rs"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297371 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-config\") pod \"controller-manager-879f6c89f-d8bfn\" (UID: \"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297386 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-trusted-ca-bundle\") pod \"console-f9d7485db-52x8r\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " pod="openshift-console/console-f9d7485db-52x8r"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297401 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xqz7\" (UniqueName: \"kubernetes.io/projected/0baef212-8f33-4c4f-b5e2-af5cad0333d5-kube-api-access-6xqz7\") pod \"olm-operator-6b444d44fb-xwqwc\" (UID: \"0baef212-8f33-4c4f-b5e2-af5cad0333d5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xwqwc"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297416 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-console-serving-cert\") pod \"console-f9d7485db-52x8r\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " pod="openshift-console/console-f9d7485db-52x8r"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297431 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6be1d7b9-4a80-4b57-a4ac-71cf05c921f9-serving-cert\") pod \"route-controller-manager-6576b87f9c-dxpwr\" (UID: \"6be1d7b9-4a80-4b57-a4ac-71cf05c921f9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297448 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/f324aefe-58e1-4638-915b-8cc658d13c5d-plugins-dir\") pod \"csi-hostpathplugin-87rgh\" (UID: \"f324aefe-58e1-4638-915b-8cc658d13c5d\") " pod="hostpath-provisioner/csi-hostpathplugin-87rgh"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297472 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8906a9ad-be53-4475-8b99-c2895ff794fa-service-ca-bundle\") pod \"router-default-5444994796-j4hgc\" (UID: \"8906a9ad-be53-4475-8b99-c2895ff794fa\") " pod="openshift-ingress/router-default-5444994796-j4hgc"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297486 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3-etcd-client\") pod \"etcd-operator-b45778765-789rs\" (UID: \"5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-789rs"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297500 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shwnk\" (UniqueName: \"kubernetes.io/projected/6d196250-dda3-49c7-8f2d-4e4888010659-kube-api-access-shwnk\") pod \"package-server-manager-789f6589d5-vtnjq\" (UID: \"6d196250-dda3-49c7-8f2d-4e4888010659\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vtnjq"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297514 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6be1d7b9-4a80-4b57-a4ac-71cf05c921f9-config\") pod \"route-controller-manager-6576b87f9c-dxpwr\" (UID: \"6be1d7b9-4a80-4b57-a4ac-71cf05c921f9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297536 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4a7f54b-f039-4564-a917-b643351932f5-config\") pod \"kube-apiserver-operator-766d6c64bb-br5vj\" (UID: \"c4a7f54b-f039-4564-a917-b643351932f5\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-br5vj"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297550 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume
started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4d206d4a-17c1-4750-9984-35ff08f92e2e-cert\") pod \"ingress-canary-qkxnl\" (UID: \"4d206d4a-17c1-4750-9984-35ff08f92e2e\") " pod="openshift-ingress-canary/ingress-canary-qkxnl" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297565 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7fa3078d-93ff-452f-a746-010c568af171-encryption-config\") pod \"apiserver-7bbb656c7d-lp6wn\" (UID: \"7fa3078d-93ff-452f-a746-010c568af171\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297579 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b695h\" (UniqueName: \"kubernetes.io/projected/1114abcc-5526-4ef9-8e98-8fe888a77072-kube-api-access-b695h\") pod \"multus-admission-controller-857f4d67dd-wmm47\" (UID: \"1114abcc-5526-4ef9-8e98-8fe888a77072\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-wmm47" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297593 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/192cea7d-fce6-4d2f-8fc7-9b0ed2be15fe-trusted-ca\") pod \"console-operator-58897d9998-d9ddv\" (UID: \"192cea7d-fce6-4d2f-8fc7-9b0ed2be15fe\") " pod="openshift-console-operator/console-operator-58897d9998-d9ddv" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297607 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7c7cadd7-3773-48bd-a595-631a38d4d693-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-jrfmt\" (UID: \"7c7cadd7-3773-48bd-a595-631a38d4d693\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jrfmt" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297620 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-console-config\") pod \"console-f9d7485db-52x8r\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " pod="openshift-console/console-f9d7485db-52x8r" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297633 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-service-ca\") pod \"console-f9d7485db-52x8r\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " pod="openshift-console/console-f9d7485db-52x8r" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297645 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0baef212-8f33-4c4f-b5e2-af5cad0333d5-srv-cert\") pod \"olm-operator-6b444d44fb-xwqwc\" (UID: \"0baef212-8f33-4c4f-b5e2-af5cad0333d5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xwqwc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297669 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-registry-certificates\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 
01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297683 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/cb6f81ed-0172-4f32-97cb-b2911644523a-metrics-tls\") pod \"ingress-operator-5b745b69d9-j6rr8\" (UID: \"cb6f81ed-0172-4f32-97cb-b2911644523a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6rr8" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297700 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/39126103-ae29-4563-af2f-cd549bb98ebb-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-cn2l5\" (UID: \"39126103-ae29-4563-af2f-cd549bb98ebb\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cn2l5" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297716 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88rfr\" (UniqueName: \"kubernetes.io/projected/514794d8-40b4-4938-9d15-aa39764ac45c-kube-api-access-88rfr\") pod \"dns-default-jwp2r\" (UID: \"514794d8-40b4-4938-9d15-aa39764ac45c\") " pod="openshift-dns/dns-default-jwp2r" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297733 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjjsv\" (UniqueName: \"kubernetes.io/projected/cb6f81ed-0172-4f32-97cb-b2911644523a-kube-api-access-cjjsv\") pod \"ingress-operator-5b745b69d9-j6rr8\" (UID: \"cb6f81ed-0172-4f32-97cb-b2911644523a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6rr8" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297748 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6be1d7b9-4a80-4b57-a4ac-71cf05c921f9-client-ca\") pod \"route-controller-manager-6576b87f9c-dxpwr\" (UID: \"6be1d7b9-4a80-4b57-a4ac-71cf05c921f9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297780 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgs24\" (UniqueName: \"kubernetes.io/projected/b8a033c5-0e8d-4f09-8bb8-5768d1a370a4-kube-api-access-rgs24\") pod \"openshift-controller-manager-operator-756b6f6bc6-xpv92\" (UID: \"b8a033c5-0e8d-4f09-8bb8-5768d1a370a4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xpv92" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297794 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7fa3078d-93ff-452f-a746-010c568af171-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-lp6wn\" (UID: \"7fa3078d-93ff-452f-a746-010c568af171\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297808 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/8906a9ad-be53-4475-8b99-c2895ff794fa-stats-auth\") pod \"router-default-5444994796-j4hgc\" (UID: \"8906a9ad-be53-4475-8b99-c2895ff794fa\") " pod="openshift-ingress/router-default-5444994796-j4hgc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297825 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"metrics-tls\" (UniqueName: \"kubernetes.io/secret/514794d8-40b4-4938-9d15-aa39764ac45c-metrics-tls\") pod \"dns-default-jwp2r\" (UID: \"514794d8-40b4-4938-9d15-aa39764ac45c\") " pod="openshift-dns/dns-default-jwp2r" Dec 01 06:45:26 crc kubenswrapper[4632]: E1201 06:45:26.298306 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:26.798290667 +0000 UTC m=+136.363303639 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.299531 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-client-ca\") pod \"controller-manager-879f6c89f-d8bfn\" (UID: \"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.297839 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5q72b\" (UniqueName: \"kubernetes.io/projected/887e4a51-f7e3-4e2d-aa8e-10cd0793898b-kube-api-access-5q72b\") pod \"authentication-operator-69f744f599-nqvnc\" (UID: \"887e4a51-f7e3-4e2d-aa8e-10cd0793898b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nqvnc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.299624 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/93c3de01-c9ea-4c65-8d6d-5cce60e89f5d-apiservice-cert\") pod \"packageserver-d55dfcdfc-58hth\" (UID: \"93c3de01-c9ea-4c65-8d6d-5cce60e89f5d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-58hth" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.299642 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/93b6b658-7ff9-48ad-bd7b-6518ff4cdd5c-machine-approver-tls\") pod \"machine-approver-56656f9798-ttl89\" (UID: \"93b6b658-7ff9-48ad-bd7b-6518ff4cdd5c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ttl89" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.299660 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pw5tg\" (UniqueName: \"kubernetes.io/projected/3c731d5d-a428-4eca-aaf3-441b5048a034-kube-api-access-pw5tg\") pod \"machine-config-server-5xz74\" (UID: \"3c731d5d-a428-4eca-aaf3-441b5048a034\") " pod="openshift-machine-config-operator/machine-config-server-5xz74" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.299684 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3c63f605-5c81-44a5-b1f5-448b6f87c7a4-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6vgtg\" (UID: 
\"3c63f605-5c81-44a5-b1f5-448b6f87c7a4\") " pod="openshift-marketplace/marketplace-operator-79b997595-6vgtg" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.299710 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dab5433a-456e-4006-a05d-a2f04ebe1330-config-volume\") pod \"collect-profiles-29409525-ndjg6\" (UID: \"dab5433a-456e-4006-a05d-a2f04ebe1330\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-ndjg6" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.299726 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7a1eaf03-bfc0-4185-ad22-55e6ef75dae5-serving-cert\") pod \"service-ca-operator-777779d784-kmxd4\" (UID: \"7a1eaf03-bfc0-4185-ad22-55e6ef75dae5\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-kmxd4" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.299744 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c7cadd7-3773-48bd-a595-631a38d4d693-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-jrfmt\" (UID: \"7c7cadd7-3773-48bd-a595-631a38d4d693\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jrfmt" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.299758 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/39126103-ae29-4563-af2f-cd549bb98ebb-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-cn2l5\" (UID: \"39126103-ae29-4563-af2f-cd549bb98ebb\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cn2l5" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.299774 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.299789 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fn9cs\" (UniqueName: \"kubernetes.io/projected/5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3-kube-api-access-fn9cs\") pod \"etcd-operator-b45778765-789rs\" (UID: \"5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-789rs" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.299820 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/f324aefe-58e1-4638-915b-8cc658d13c5d-csi-data-dir\") pod \"csi-hostpathplugin-87rgh\" (UID: \"f324aefe-58e1-4638-915b-8cc658d13c5d\") " pod="hostpath-provisioner/csi-hostpathplugin-87rgh" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.299838 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" 
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.299853 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/e90e3510-a941-487a-af96-639fdc977fbb-images\") pod \"machine-api-operator-5694c8668f-g99wc\" (UID: \"e90e3510-a941-487a-af96-639fdc977fbb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-g99wc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.299868 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/f324aefe-58e1-4638-915b-8cc658d13c5d-socket-dir\") pod \"csi-hostpathplugin-87rgh\" (UID: \"f324aefe-58e1-4638-915b-8cc658d13c5d\") " pod="hostpath-provisioner/csi-hostpathplugin-87rgh" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.299883 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/887e4a51-f7e3-4e2d-aa8e-10cd0793898b-service-ca-bundle\") pod \"authentication-operator-69f744f599-nqvnc\" (UID: \"887e4a51-f7e3-4e2d-aa8e-10cd0793898b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nqvnc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.299897 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/e90e3510-a941-487a-af96-639fdc977fbb-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-g99wc\" (UID: \"e90e3510-a941-487a-af96-639fdc977fbb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-g99wc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.299922 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7fa3078d-93ff-452f-a746-010c568af171-audit-policies\") pod \"apiserver-7bbb656c7d-lp6wn\" (UID: \"7fa3078d-93ff-452f-a746-010c568af171\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.299936 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.299952 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dab5433a-456e-4006-a05d-a2f04ebe1330-secret-volume\") pod \"collect-profiles-29409525-ndjg6\" (UID: \"dab5433a-456e-4006-a05d-a2f04ebe1330\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-ndjg6" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.299970 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.299986 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-console-oauth-config\") pod \"console-f9d7485db-52x8r\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " pod="openshift-console/console-f9d7485db-52x8r" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300013 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c4a7f54b-f039-4564-a917-b643351932f5-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-br5vj\" (UID: \"c4a7f54b-f039-4564-a917-b643351932f5\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-br5vj" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300027 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dq5gf\" (UniqueName: \"kubernetes.io/projected/7a1eaf03-bfc0-4185-ad22-55e6ef75dae5-kube-api-access-dq5gf\") pod \"service-ca-operator-777779d784-kmxd4\" (UID: \"7a1eaf03-bfc0-4185-ad22-55e6ef75dae5\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-kmxd4" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300046 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e90e3510-a941-487a-af96-639fdc977fbb-config\") pod \"machine-api-operator-5694c8668f-g99wc\" (UID: \"e90e3510-a941-487a-af96-639fdc977fbb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-g99wc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300061 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/93c3de01-c9ea-4c65-8d6d-5cce60e89f5d-tmpfs\") pod \"packageserver-d55dfcdfc-58hth\" (UID: \"93c3de01-c9ea-4c65-8d6d-5cce60e89f5d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-58hth" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300090 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8a033c5-0e8d-4f09-8bb8-5768d1a370a4-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-xpv92\" (UID: \"b8a033c5-0e8d-4f09-8bb8-5768d1a370a4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xpv92" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300104 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zddsw\" (UniqueName: \"kubernetes.io/projected/f324aefe-58e1-4638-915b-8cc658d13c5d-kube-api-access-zddsw\") pod \"csi-hostpathplugin-87rgh\" (UID: \"f324aefe-58e1-4638-915b-8cc658d13c5d\") " pod="hostpath-provisioner/csi-hostpathplugin-87rgh" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300119 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0baef212-8f33-4c4f-b5e2-af5cad0333d5-profile-collector-cert\") pod \"olm-operator-6b444d44fb-xwqwc\" (UID: \"0baef212-8f33-4c4f-b5e2-af5cad0333d5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xwqwc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300135 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-registry-tls\") pod \"image-registry-697d97f7c8-w2js2\" (UID: 
\"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300149 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c439cd75-04a3-4881-8878-44f0a91f2088-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-v966p\" (UID: \"c439cd75-04a3-4881-8878-44f0a91f2088\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v966p" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300163 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-bound-sa-token\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300178 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rh9d7\" (UniqueName: \"kubernetes.io/projected/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-kube-api-access-rh9d7\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300191 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3c63f605-5c81-44a5-b1f5-448b6f87c7a4-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6vgtg\" (UID: \"3c63f605-5c81-44a5-b1f5-448b6f87c7a4\") " pod="openshift-marketplace/marketplace-operator-79b997595-6vgtg" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300205 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3-config\") pod \"etcd-operator-b45778765-789rs\" (UID: \"5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-789rs" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300221 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7g4lz\" (UniqueName: \"kubernetes.io/projected/8a77399a-5697-4f2d-8ec5-8e05106a356d-kube-api-access-7g4lz\") pod \"openshift-config-operator-7777fb866f-gkwgl\" (UID: \"8a77399a-5697-4f2d-8ec5-8e05106a356d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gkwgl" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300236 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300252 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cb6f81ed-0172-4f32-97cb-b2911644523a-trusted-ca\") pod \"ingress-operator-5b745b69d9-j6rr8\" (UID: \"cb6f81ed-0172-4f32-97cb-b2911644523a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6rr8" Dec 01 06:45:26 crc 
kubenswrapper[4632]: I1201 06:45:26.300267 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7fa3078d-93ff-452f-a746-010c568af171-etcd-client\") pod \"apiserver-7bbb656c7d-lp6wn\" (UID: \"7fa3078d-93ff-452f-a746-010c568af171\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300300 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8906a9ad-be53-4475-8b99-c2895ff794fa-metrics-certs\") pod \"router-default-5444994796-j4hgc\" (UID: \"8906a9ad-be53-4475-8b99-c2895ff794fa\") " pod="openshift-ingress/router-default-5444994796-j4hgc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300314 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvjvx\" (UniqueName: \"kubernetes.io/projected/93b6b658-7ff9-48ad-bd7b-6518ff4cdd5c-kube-api-access-vvjvx\") pod \"machine-approver-56656f9798-ttl89\" (UID: \"93b6b658-7ff9-48ad-bd7b-6518ff4cdd5c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ttl89" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300329 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/514794d8-40b4-4938-9d15-aa39764ac45c-config-volume\") pod \"dns-default-jwp2r\" (UID: \"514794d8-40b4-4938-9d15-aa39764ac45c\") " pod="openshift-dns/dns-default-jwp2r" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300388 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7fa3078d-93ff-452f-a746-010c568af171-serving-cert\") pod \"apiserver-7bbb656c7d-lp6wn\" (UID: \"7fa3078d-93ff-452f-a746-010c568af171\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300403 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300417 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1114abcc-5526-4ef9-8e98-8fe888a77072-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-wmm47\" (UID: \"1114abcc-5526-4ef9-8e98-8fe888a77072\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-wmm47" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300432 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c439cd75-04a3-4881-8878-44f0a91f2088-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-v966p\" (UID: \"c439cd75-04a3-4881-8878-44f0a91f2088\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v966p" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300452 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/ae14b3fb-8e47-4b2c-af2a-689240f1dae9-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-m2fgg\" (UID: \"ae14b3fb-8e47-4b2c-af2a-689240f1dae9\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-m2fgg" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300469 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xc56k\" (UniqueName: \"kubernetes.io/projected/4d206d4a-17c1-4750-9984-35ff08f92e2e-kube-api-access-xc56k\") pod \"ingress-canary-qkxnl\" (UID: \"4d206d4a-17c1-4750-9984-35ff08f92e2e\") " pod="openshift-ingress-canary/ingress-canary-qkxnl" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300495 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3681c29d-32b7-4037-bd1c-18c2733173bc-audit-policies\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300510 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/192cea7d-fce6-4d2f-8fc7-9b0ed2be15fe-serving-cert\") pod \"console-operator-58897d9998-d9ddv\" (UID: \"192cea7d-fce6-4d2f-8fc7-9b0ed2be15fe\") " pod="openshift-console-operator/console-operator-58897d9998-d9ddv" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300538 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4bdc3484-8840-4d88-83c9-32ba1da693fa-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-2spdj\" (UID: \"4bdc3484-8840-4d88-83c9-32ba1da693fa\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2spdj" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300553 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cb6f81ed-0172-4f32-97cb-b2911644523a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-j6rr8\" (UID: \"cb6f81ed-0172-4f32-97cb-b2911644523a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6rr8" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300579 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300601 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kb846\" (UniqueName: \"kubernetes.io/projected/3681c29d-32b7-4037-bd1c-18c2733173bc-kube-api-access-kb846\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300618 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgnf4\" (UniqueName: \"kubernetes.io/projected/e354731f-7ed1-47e3-8d64-7d55f1613100-kube-api-access-hgnf4\") pod 
\"control-plane-machine-set-operator-78cbb6b69f-mxcl4\" (UID: \"e354731f-7ed1-47e3-8d64-7d55f1613100\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mxcl4" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300632 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3-serving-cert\") pod \"etcd-operator-b45778765-789rs\" (UID: \"5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-789rs" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300646 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlqcg\" (UniqueName: \"kubernetes.io/projected/39126103-ae29-4563-af2f-cd549bb98ebb-kube-api-access-rlqcg\") pod \"cluster-image-registry-operator-dc59b4c8b-cn2l5\" (UID: \"39126103-ae29-4563-af2f-cd549bb98ebb\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cn2l5" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300664 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/93b6b658-7ff9-48ad-bd7b-6518ff4cdd5c-auth-proxy-config\") pod \"machine-approver-56656f9798-ttl89\" (UID: \"93b6b658-7ff9-48ad-bd7b-6518ff4cdd5c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ttl89" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300680 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ld2jx\" (UniqueName: \"kubernetes.io/projected/7fa3078d-93ff-452f-a746-010c568af171-kube-api-access-ld2jx\") pod \"apiserver-7bbb656c7d-lp6wn\" (UID: \"7fa3078d-93ff-452f-a746-010c568af171\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300694 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvqlf\" (UniqueName: \"kubernetes.io/projected/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-kube-api-access-cvqlf\") pod \"controller-manager-879f6c89f-d8bfn\" (UID: \"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300717 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a1eaf03-bfc0-4185-ad22-55e6ef75dae5-config\") pod \"service-ca-operator-777779d784-kmxd4\" (UID: \"7a1eaf03-bfc0-4185-ad22-55e6ef75dae5\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-kmxd4" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300732 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-installation-pull-secrets\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300746 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4bdc3484-8840-4d88-83c9-32ba1da693fa-proxy-tls\") pod \"machine-config-controller-84d6567774-2spdj\" (UID: \"4bdc3484-8840-4d88-83c9-32ba1da693fa\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2spdj" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300760 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/93c3de01-c9ea-4c65-8d6d-5cce60e89f5d-webhook-cert\") pod \"packageserver-d55dfcdfc-58hth\" (UID: \"93c3de01-c9ea-4c65-8d6d-5cce60e89f5d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-58hth" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300784 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4a7f54b-f039-4564-a917-b643351932f5-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-br5vj\" (UID: \"c4a7f54b-f039-4564-a917-b643351932f5\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-br5vj" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300798 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/6d196250-dda3-49c7-8f2d-4e4888010659-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-vtnjq\" (UID: \"6d196250-dda3-49c7-8f2d-4e4888010659\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vtnjq" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300811 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/8906a9ad-be53-4475-8b99-c2895ff794fa-default-certificate\") pod \"router-default-5444994796-j4hgc\" (UID: \"8906a9ad-be53-4475-8b99-c2895ff794fa\") " pod="openshift-ingress/router-default-5444994796-j4hgc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300826 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8shzv\" (UniqueName: \"kubernetes.io/projected/2181e6e0-5901-48a6-b44f-621e41a161d5-kube-api-access-8shzv\") pod \"downloads-7954f5f757-wchfw\" (UID: \"2181e6e0-5901-48a6-b44f-621e41a161d5\") " pod="openshift-console/downloads-7954f5f757-wchfw" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300839 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/3c731d5d-a428-4eca-aaf3-441b5048a034-certs\") pod \"machine-config-server-5xz74\" (UID: \"3c731d5d-a428-4eca-aaf3-441b5048a034\") " pod="openshift-machine-config-operator/machine-config-server-5xz74" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300854 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvb4h\" (UniqueName: \"kubernetes.io/projected/93c3de01-c9ea-4c65-8d6d-5cce60e89f5d-kube-api-access-zvb4h\") pod \"packageserver-d55dfcdfc-58hth\" (UID: \"93c3de01-c9ea-4c65-8d6d-5cce60e89f5d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-58hth" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300869 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3681c29d-32b7-4037-bd1c-18c2733173bc-audit-dir\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300884 4632 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/887e4a51-f7e3-4e2d-aa8e-10cd0793898b-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-nqvnc\" (UID: \"887e4a51-f7e3-4e2d-aa8e-10cd0793898b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nqvnc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300907 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjqr8\" (UniqueName: \"kubernetes.io/projected/4bdc3484-8840-4d88-83c9-32ba1da693fa-kube-api-access-gjqr8\") pod \"machine-config-controller-84d6567774-2spdj\" (UID: \"4bdc3484-8840-4d88-83c9-32ba1da693fa\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2spdj" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300922 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grk9k\" (UniqueName: \"kubernetes.io/projected/192cea7d-fce6-4d2f-8fc7-9b0ed2be15fe-kube-api-access-grk9k\") pod \"console-operator-58897d9998-d9ddv\" (UID: \"192cea7d-fce6-4d2f-8fc7-9b0ed2be15fe\") " pod="openshift-console-operator/console-operator-58897d9998-d9ddv" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300944 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300959 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300974 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-oauth-serving-cert\") pod \"console-f9d7485db-52x8r\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " pod="openshift-console/console-f9d7485db-52x8r" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.300989 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c439cd75-04a3-4881-8878-44f0a91f2088-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-v966p\" (UID: \"c439cd75-04a3-4881-8878-44f0a91f2088\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v966p" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.301005 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/e354731f-7ed1-47e3-8d64-7d55f1613100-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-mxcl4\" (UID: \"e354731f-7ed1-47e3-8d64-7d55f1613100\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mxcl4" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.301023 4632 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93b6b658-7ff9-48ad-bd7b-6518ff4cdd5c-config\") pod \"machine-approver-56656f9798-ttl89\" (UID: \"93b6b658-7ff9-48ad-bd7b-6518ff4cdd5c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ttl89" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.301039 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3-etcd-service-ca\") pod \"etcd-operator-b45778765-789rs\" (UID: \"5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-789rs" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.301054 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-serving-cert\") pod \"controller-manager-879f6c89f-d8bfn\" (UID: \"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.301080 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25r9n\" (UniqueName: \"kubernetes.io/projected/ae14b3fb-8e47-4b2c-af2a-689240f1dae9-kube-api-access-25r9n\") pod \"cluster-samples-operator-665b6dd947-m2fgg\" (UID: \"ae14b3fb-8e47-4b2c-af2a-689240f1dae9\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-m2fgg" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.301094 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/f324aefe-58e1-4638-915b-8cc658d13c5d-mountpoint-dir\") pod \"csi-hostpathplugin-87rgh\" (UID: \"f324aefe-58e1-4638-915b-8cc658d13c5d\") " pod="hostpath-provisioner/csi-hostpathplugin-87rgh" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.301119 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/978db707-ffa7-491e-b032-59db1b5aa9c9-profile-collector-cert\") pod \"catalog-operator-68c6474976-7r8sp\" (UID: \"978db707-ffa7-491e-b032-59db1b5aa9c9\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7r8sp" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.301134 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c7cadd7-3773-48bd-a595-631a38d4d693-config\") pod \"kube-controller-manager-operator-78b949d7b-jrfmt\" (UID: \"7c7cadd7-3773-48bd-a595-631a38d4d693\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jrfmt" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.301149 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/3c731d5d-a428-4eca-aaf3-441b5048a034-node-bootstrap-token\") pod \"machine-config-server-5xz74\" (UID: \"3c731d5d-a428-4eca-aaf3-441b5048a034\") " pod="openshift-machine-config-operator/machine-config-server-5xz74" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.301164 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/192cea7d-fce6-4d2f-8fc7-9b0ed2be15fe-config\") pod \"console-operator-58897d9998-d9ddv\" (UID: \"192cea7d-fce6-4d2f-8fc7-9b0ed2be15fe\") " pod="openshift-console-operator/console-operator-58897d9998-d9ddv" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.301179 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qwhw\" (UniqueName: \"kubernetes.io/projected/6be1d7b9-4a80-4b57-a4ac-71cf05c921f9-kube-api-access-7qwhw\") pod \"route-controller-manager-6576b87f9c-dxpwr\" (UID: \"6be1d7b9-4a80-4b57-a4ac-71cf05c921f9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.301194 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/f324aefe-58e1-4638-915b-8cc658d13c5d-registration-dir\") pod \"csi-hostpathplugin-87rgh\" (UID: \"f324aefe-58e1-4638-915b-8cc658d13c5d\") " pod="hostpath-provisioner/csi-hostpathplugin-87rgh" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.301221 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.301236 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/887e4a51-f7e3-4e2d-aa8e-10cd0793898b-serving-cert\") pod \"authentication-operator-69f744f599-nqvnc\" (UID: \"887e4a51-f7e3-4e2d-aa8e-10cd0793898b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nqvnc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.301252 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-ca-trust-extracted\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.301269 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/39126103-ae29-4563-af2f-cd549bb98ebb-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-cn2l5\" (UID: \"39126103-ae29-4563-af2f-cd549bb98ebb\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cn2l5" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.301284 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bk2sv\" (UniqueName: \"kubernetes.io/projected/3c63f605-5c81-44a5-b1f5-448b6f87c7a4-kube-api-access-bk2sv\") pod \"marketplace-operator-79b997595-6vgtg\" (UID: \"3c63f605-5c81-44a5-b1f5-448b6f87c7a4\") " pod="openshift-marketplace/marketplace-operator-79b997595-6vgtg" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.301299 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: 
\"kubernetes.io/empty-dir/8a77399a-5697-4f2d-8ec5-8e05106a356d-available-featuregates\") pod \"openshift-config-operator-7777fb866f-gkwgl\" (UID: \"8a77399a-5697-4f2d-8ec5-8e05106a356d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gkwgl" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.301315 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.301329 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/887e4a51-f7e3-4e2d-aa8e-10cd0793898b-config\") pod \"authentication-operator-69f744f599-nqvnc\" (UID: \"887e4a51-f7e3-4e2d-aa8e-10cd0793898b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nqvnc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.301343 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q89k4\" (UniqueName: \"kubernetes.io/projected/dab5433a-456e-4006-a05d-a2f04ebe1330-kube-api-access-q89k4\") pod \"collect-profiles-29409525-ndjg6\" (UID: \"dab5433a-456e-4006-a05d-a2f04ebe1330\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-ndjg6" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.301374 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.301444 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7fa3078d-93ff-452f-a746-010c568af171-audit-dir\") pod \"apiserver-7bbb656c7d-lp6wn\" (UID: \"7fa3078d-93ff-452f-a746-010c568af171\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.301771 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7fa3078d-93ff-452f-a746-010c568af171-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-lp6wn\" (UID: \"7fa3078d-93ff-452f-a746-010c568af171\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.302435 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3-etcd-ca\") pod \"etcd-operator-b45778765-789rs\" (UID: \"5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-789rs" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.302344 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8906a9ad-be53-4475-8b99-c2895ff794fa-service-ca-bundle\") pod \"router-default-5444994796-j4hgc\" (UID: \"8906a9ad-be53-4475-8b99-c2895ff794fa\") " 
pod="openshift-ingress/router-default-5444994796-j4hgc" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.302868 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-config\") pod \"controller-manager-879f6c89f-d8bfn\" (UID: \"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.303147 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b8a033c5-0e8d-4f09-8bb8-5768d1a370a4-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-xpv92\" (UID: \"b8a033c5-0e8d-4f09-8bb8-5768d1a370a4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xpv92" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.303221 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lrlpn" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.303643 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/978db707-ffa7-491e-b032-59db1b5aa9c9-srv-cert\") pod \"catalog-operator-68c6474976-7r8sp\" (UID: \"978db707-ffa7-491e-b032-59db1b5aa9c9\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7r8sp" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.303982 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6be1d7b9-4a80-4b57-a4ac-71cf05c921f9-config\") pod \"route-controller-manager-6576b87f9c-dxpwr\" (UID: \"6be1d7b9-4a80-4b57-a4ac-71cf05c921f9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.304150 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-trusted-ca\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.304920 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-trusted-ca-bundle\") pod \"console-f9d7485db-52x8r\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " pod="openshift-console/console-f9d7485db-52x8r" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.305100 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6be1d7b9-4a80-4b57-a4ac-71cf05c921f9-client-ca\") pod \"route-controller-manager-6576b87f9c-dxpwr\" (UID: \"6be1d7b9-4a80-4b57-a4ac-71cf05c921f9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr" Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.305208 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-d8bfn\" (UID: \"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn" 
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.305501 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4a7f54b-f039-4564-a917-b643351932f5-config\") pod \"kube-apiserver-operator-766d6c64bb-br5vj\" (UID: \"c4a7f54b-f039-4564-a917-b643351932f5\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-br5vj"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.305661 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-service-ca\") pod \"console-f9d7485db-52x8r\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " pod="openshift-console/console-f9d7485db-52x8r"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.305776 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3-etcd-client\") pod \"etcd-operator-b45778765-789rs\" (UID: \"5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-789rs"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.305102 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7fa3078d-93ff-452f-a746-010c568af171-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-lp6wn\" (UID: \"7fa3078d-93ff-452f-a746-010c568af171\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.308023 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3681c29d-32b7-4037-bd1c-18c2733173bc-audit-dir\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.308030 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/e90e3510-a941-487a-af96-639fdc977fbb-images\") pod \"machine-api-operator-5694c8668f-g99wc\" (UID: \"e90e3510-a941-487a-af96-639fdc977fbb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-g99wc"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.309072 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3c63f605-5c81-44a5-b1f5-448b6f87c7a4-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6vgtg\" (UID: \"3c63f605-5c81-44a5-b1f5-448b6f87c7a4\") " pod="openshift-marketplace/marketplace-operator-79b997595-6vgtg"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.309151 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.309170 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dab5433a-456e-4006-a05d-a2f04ebe1330-config-volume\") pod \"collect-profiles-29409525-ndjg6\" (UID: \"dab5433a-456e-4006-a05d-a2f04ebe1330\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-ndjg6"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.309523 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/887e4a51-f7e3-4e2d-aa8e-10cd0793898b-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-nqvnc\" (UID: \"887e4a51-f7e3-4e2d-aa8e-10cd0793898b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nqvnc"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.309604 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3-config\") pod \"etcd-operator-b45778765-789rs\" (UID: \"5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-789rs"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.309791 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-ca-trust-extracted\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.310255 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/8906a9ad-be53-4475-8b99-c2895ff794fa-stats-auth\") pod \"router-default-5444994796-j4hgc\" (UID: \"8906a9ad-be53-4475-8b99-c2895ff794fa\") " pod="openshift-ingress/router-default-5444994796-j4hgc"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.310794 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/887e4a51-f7e3-4e2d-aa8e-10cd0793898b-config\") pod \"authentication-operator-69f744f599-nqvnc\" (UID: \"887e4a51-f7e3-4e2d-aa8e-10cd0793898b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nqvnc"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.311113 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-console-serving-cert\") pod \"console-f9d7485db-52x8r\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " pod="openshift-console/console-f9d7485db-52x8r"
Dec 01 06:45:26 crc kubenswrapper[4632]: E1201 06:45:26.311456 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:26.811445005 +0000 UTC m=+136.376457978 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.311524 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7fa3078d-93ff-452f-a746-010c568af171-audit-policies\") pod \"apiserver-7bbb656c7d-lp6wn\" (UID: \"7fa3078d-93ff-452f-a746-010c568af171\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.311718 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/192cea7d-fce6-4d2f-8fc7-9b0ed2be15fe-config\") pod \"console-operator-58897d9998-d9ddv\" (UID: \"192cea7d-fce6-4d2f-8fc7-9b0ed2be15fe\") " pod="openshift-console-operator/console-operator-58897d9998-d9ddv"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.311949 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/887e4a51-f7e3-4e2d-aa8e-10cd0793898b-serving-cert\") pod \"authentication-operator-69f744f599-nqvnc\" (UID: \"887e4a51-f7e3-4e2d-aa8e-10cd0793898b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nqvnc"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.311971 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c439cd75-04a3-4881-8878-44f0a91f2088-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-v966p\" (UID: \"c439cd75-04a3-4881-8878-44f0a91f2088\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v966p"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.312054 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-registry-certificates\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.316939 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/192cea7d-fce6-4d2f-8fc7-9b0ed2be15fe-trusted-ca\") pod \"console-operator-58897d9998-d9ddv\" (UID: \"192cea7d-fce6-4d2f-8fc7-9b0ed2be15fe\") " pod="openshift-console-operator/console-operator-58897d9998-d9ddv"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.317478 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6be1d7b9-4a80-4b57-a4ac-71cf05c921f9-serving-cert\") pod \"route-controller-manager-6576b87f9c-dxpwr\" (UID: \"6be1d7b9-4a80-4b57-a4ac-71cf05c921f9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.317582 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c7cadd7-3773-48bd-a595-631a38d4d693-config\") pod \"kube-controller-manager-operator-78b949d7b-jrfmt\" (UID: \"7c7cadd7-3773-48bd-a595-631a38d4d693\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jrfmt"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.318873 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93b6b658-7ff9-48ad-bd7b-6518ff4cdd5c-config\") pod \"machine-approver-56656f9798-ttl89\" (UID: \"93b6b658-7ff9-48ad-bd7b-6518ff4cdd5c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ttl89"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.319516 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8906a9ad-be53-4475-8b99-c2895ff794fa-metrics-certs\") pod \"router-default-5444994796-j4hgc\" (UID: \"8906a9ad-be53-4475-8b99-c2895ff794fa\") " pod="openshift-ingress/router-default-5444994796-j4hgc"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.319560 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e90e3510-a941-487a-af96-639fdc977fbb-config\") pod \"machine-api-operator-5694c8668f-g99wc\" (UID: \"e90e3510-a941-487a-af96-639fdc977fbb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-g99wc"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.321774 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/93c3de01-c9ea-4c65-8d6d-5cce60e89f5d-tmpfs\") pod \"packageserver-d55dfcdfc-58hth\" (UID: \"93c3de01-c9ea-4c65-8d6d-5cce60e89f5d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-58hth"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.321977 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3681c29d-32b7-4037-bd1c-18c2733173bc-audit-policies\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.322123 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-console-config\") pod \"console-f9d7485db-52x8r\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " pod="openshift-console/console-f9d7485db-52x8r"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.322613 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/887e4a51-f7e3-4e2d-aa8e-10cd0793898b-service-ca-bundle\") pod \"authentication-operator-69f744f599-nqvnc\" (UID: \"887e4a51-f7e3-4e2d-aa8e-10cd0793898b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nqvnc"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.322804 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.322846 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/8a77399a-5697-4f2d-8ec5-8e05106a356d-available-featuregates\") pod \"openshift-config-operator-7777fb866f-gkwgl\" (UID: \"8a77399a-5697-4f2d-8ec5-8e05106a356d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gkwgl"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.323077 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/4bdc3484-8840-4d88-83c9-32ba1da693fa-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-2spdj\" (UID: \"4bdc3484-8840-4d88-83c9-32ba1da693fa\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2spdj"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.323226 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/192cea7d-fce6-4d2f-8fc7-9b0ed2be15fe-serving-cert\") pod \"console-operator-58897d9998-d9ddv\" (UID: \"192cea7d-fce6-4d2f-8fc7-9b0ed2be15fe\") " pod="openshift-console-operator/console-operator-58897d9998-d9ddv"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.323324 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8a033c5-0e8d-4f09-8bb8-5768d1a370a4-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-xpv92\" (UID: \"b8a033c5-0e8d-4f09-8bb8-5768d1a370a4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xpv92"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.323422 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/93c3de01-c9ea-4c65-8d6d-5cce60e89f5d-apiservice-cert\") pod \"packageserver-d55dfcdfc-58hth\" (UID: \"93c3de01-c9ea-4c65-8d6d-5cce60e89f5d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-58hth"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.323720 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.324031 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cb6f81ed-0172-4f32-97cb-b2911644523a-trusted-ca\") pod \"ingress-operator-5b745b69d9-j6rr8\" (UID: \"cb6f81ed-0172-4f32-97cb-b2911644523a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6rr8"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.324050 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7a1eaf03-bfc0-4185-ad22-55e6ef75dae5-serving-cert\") pod \"service-ca-operator-777779d784-kmxd4\" (UID: \"7a1eaf03-bfc0-4185-ad22-55e6ef75dae5\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-kmxd4"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.325308 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/39126103-ae29-4563-af2f-cd549bb98ebb-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-cn2l5\" (UID: \"39126103-ae29-4563-af2f-cd549bb98ebb\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cn2l5"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.325713 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-oauth-serving-cert\") pod \"console-f9d7485db-52x8r\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " pod="openshift-console/console-f9d7485db-52x8r"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.326372 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7a1eaf03-bfc0-4185-ad22-55e6ef75dae5-config\") pod \"service-ca-operator-777779d784-kmxd4\" (UID: \"7a1eaf03-bfc0-4185-ad22-55e6ef75dae5\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-kmxd4"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.326468 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3-etcd-service-ca\") pod \"etcd-operator-b45778765-789rs\" (UID: \"5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-789rs"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.327239 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.329786 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/93b6b658-7ff9-48ad-bd7b-6518ff4cdd5c-auth-proxy-config\") pod \"machine-approver-56656f9798-ttl89\" (UID: \"93b6b658-7ff9-48ad-bd7b-6518ff4cdd5c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ttl89"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.330288 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/e354731f-7ed1-47e3-8d64-7d55f1613100-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-mxcl4\" (UID: \"e354731f-7ed1-47e3-8d64-7d55f1613100\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mxcl4"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.330705 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/e90e3510-a941-487a-af96-639fdc977fbb-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-g99wc\" (UID: \"e90e3510-a941-487a-af96-639fdc977fbb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-g99wc"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.330886 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/0baef212-8f33-4c4f-b5e2-af5cad0333d5-srv-cert\") pod \"olm-operator-6b444d44fb-xwqwc\" (UID: \"0baef212-8f33-4c4f-b5e2-af5cad0333d5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xwqwc"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.331175 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dab5433a-456e-4006-a05d-a2f04ebe1330-secret-volume\") pod \"collect-profiles-29409525-ndjg6\" (UID: \"dab5433a-456e-4006-a05d-a2f04ebe1330\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-ndjg6"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.331204 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7c7cadd7-3773-48bd-a595-631a38d4d693-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-jrfmt\" (UID: \"7c7cadd7-3773-48bd-a595-631a38d4d693\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jrfmt"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.331410 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4a7f54b-f039-4564-a917-b643351932f5-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-br5vj\" (UID: \"c4a7f54b-f039-4564-a917-b643351932f5\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-br5vj"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.332482 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/978db707-ffa7-491e-b032-59db1b5aa9c9-profile-collector-cert\") pod \"catalog-operator-68c6474976-7r8sp\" (UID: \"978db707-ffa7-491e-b032-59db1b5aa9c9\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7r8sp"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.333443 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1114abcc-5526-4ef9-8e98-8fe888a77072-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-wmm47\" (UID: \"1114abcc-5526-4ef9-8e98-8fe888a77072\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-wmm47"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.334254 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.334429 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/39126103-ae29-4563-af2f-cd549bb98ebb-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-cn2l5\" (UID: \"39126103-ae29-4563-af2f-cd549bb98ebb\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cn2l5"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.335380 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.335610 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-installation-pull-secrets\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.335646 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/cb6f81ed-0172-4f32-97cb-b2911644523a-metrics-tls\") pod \"ingress-operator-5b745b69d9-j6rr8\" (UID: \"cb6f81ed-0172-4f32-97cb-b2911644523a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6rr8"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.338689 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c439cd75-04a3-4881-8878-44f0a91f2088-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-v966p\" (UID: \"c439cd75-04a3-4881-8878-44f0a91f2088\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v966p"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.341887 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/6d196250-dda3-49c7-8f2d-4e4888010659-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-vtnjq\" (UID: \"6d196250-dda3-49c7-8f2d-4e4888010659\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vtnjq"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.341904 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-serving-cert\") pod \"controller-manager-879f6c89f-d8bfn\" (UID: \"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.342059 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7fa3078d-93ff-452f-a746-010c568af171-serving-cert\") pod \"apiserver-7bbb656c7d-lp6wn\" (UID: \"7fa3078d-93ff-452f-a746-010c568af171\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.342079 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jdgfp"]
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.342539 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.343186 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-console-oauth-config\") pod \"console-f9d7485db-52x8r\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " pod="openshift-console/console-f9d7485db-52x8r"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.343486 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.343742 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6jwp\" (UniqueName: \"kubernetes.io/projected/e90e3510-a941-487a-af96-639fdc977fbb-kube-api-access-c6jwp\") pod \"machine-api-operator-5694c8668f-g99wc\" (UID: \"e90e3510-a941-487a-af96-639fdc977fbb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-g99wc"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.343964 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3-serving-cert\") pod \"etcd-operator-b45778765-789rs\" (UID: \"5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-789rs"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.344565 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/ae14b3fb-8e47-4b2c-af2a-689240f1dae9-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-m2fgg\" (UID: \"ae14b3fb-8e47-4b2c-af2a-689240f1dae9\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-m2fgg"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.348018 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7fa3078d-93ff-452f-a746-010c568af171-encryption-config\") pod \"apiserver-7bbb656c7d-lp6wn\" (UID: \"7fa3078d-93ff-452f-a746-010c568af171\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.352186 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.352756 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a77399a-5697-4f2d-8ec5-8e05106a356d-serving-cert\") pod \"openshift-config-operator-7777fb866f-gkwgl\" (UID: \"8a77399a-5697-4f2d-8ec5-8e05106a356d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gkwgl"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.354095 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/93c3de01-c9ea-4c65-8d6d-5cce60e89f5d-webhook-cert\") pod \"packageserver-d55dfcdfc-58hth\" (UID: \"93c3de01-c9ea-4c65-8d6d-5cce60e89f5d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-58hth"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.356741 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/8906a9ad-be53-4475-8b99-c2895ff794fa-default-certificate\") pod \"router-default-5444994796-j4hgc\" (UID: \"8906a9ad-be53-4475-8b99-c2895ff794fa\") " pod="openshift-ingress/router-default-5444994796-j4hgc"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.356774 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/4bdc3484-8840-4d88-83c9-32ba1da693fa-proxy-tls\") pod \"machine-config-controller-84d6567774-2spdj\" (UID: \"4bdc3484-8840-4d88-83c9-32ba1da693fa\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2spdj"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.356781 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/0baef212-8f33-4c4f-b5e2-af5cad0333d5-profile-collector-cert\") pod \"olm-operator-6b444d44fb-xwqwc\" (UID: \"0baef212-8f33-4c4f-b5e2-af5cad0333d5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xwqwc"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.356867 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/93b6b658-7ff9-48ad-bd7b-6518ff4cdd5c-machine-approver-tls\") pod \"machine-approver-56656f9798-ttl89\" (UID: \"93b6b658-7ff9-48ad-bd7b-6518ff4cdd5c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ttl89"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.357904 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-registry-tls\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.358306 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.358687 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.358704 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7fa3078d-93ff-452f-a746-010c568af171-etcd-client\") pod \"apiserver-7bbb656c7d-lp6wn\" (UID: \"7fa3078d-93ff-452f-a746-010c568af171\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.359677 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-4gjnq"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.359789 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3c63f605-5c81-44a5-b1f5-448b6f87c7a4-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6vgtg\" (UID: \"3c63f605-5c81-44a5-b1f5-448b6f87c7a4\") " pod="openshift-marketplace/marketplace-operator-79b997595-6vgtg"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.364932 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shwnk\" (UniqueName: \"kubernetes.io/projected/6d196250-dda3-49c7-8f2d-4e4888010659-kube-api-access-shwnk\") pod \"package-server-manager-789f6589d5-vtnjq\" (UID: \"6d196250-dda3-49c7-8f2d-4e4888010659\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vtnjq"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.372420 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jgmv4"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.382883 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xqz7\" (UniqueName: \"kubernetes.io/projected/0baef212-8f33-4c4f-b5e2-af5cad0333d5-kube-api-access-6xqz7\") pod \"olm-operator-6b444d44fb-xwqwc\" (UID: \"0baef212-8f33-4c4f-b5e2-af5cad0333d5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xwqwc"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.400588 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/39126103-ae29-4563-af2f-cd549bb98ebb-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-cn2l5\" (UID: \"39126103-ae29-4563-af2f-cd549bb98ebb\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cn2l5"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.401530 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-g99wc"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.402326 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:45:26 crc kubenswrapper[4632]: E1201 06:45:26.402459 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:26.902433758 +0000 UTC m=+136.467446731 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.402545 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/514794d8-40b4-4938-9d15-aa39764ac45c-config-volume\") pod \"dns-default-jwp2r\" (UID: \"514794d8-40b4-4938-9d15-aa39764ac45c\") " pod="openshift-dns/dns-default-jwp2r"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.402578 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xc56k\" (UniqueName: \"kubernetes.io/projected/4d206d4a-17c1-4750-9984-35ff08f92e2e-kube-api-access-xc56k\") pod \"ingress-canary-qkxnl\" (UID: \"4d206d4a-17c1-4750-9984-35ff08f92e2e\") " pod="openshift-ingress-canary/ingress-canary-qkxnl"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.402651 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/3c731d5d-a428-4eca-aaf3-441b5048a034-certs\") pod \"machine-config-server-5xz74\" (UID: \"3c731d5d-a428-4eca-aaf3-441b5048a034\") " pod="openshift-machine-config-operator/machine-config-server-5xz74"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.402699 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/3c731d5d-a428-4eca-aaf3-441b5048a034-node-bootstrap-token\") pod \"machine-config-server-5xz74\" (UID: \"3c731d5d-a428-4eca-aaf3-441b5048a034\") " pod="openshift-machine-config-operator/machine-config-server-5xz74"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.402714 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/f324aefe-58e1-4638-915b-8cc658d13c5d-mountpoint-dir\") pod \"csi-hostpathplugin-87rgh\" (UID: \"f324aefe-58e1-4638-915b-8cc658d13c5d\") " pod="hostpath-provisioner/csi-hostpathplugin-87rgh"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.402731 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.402753 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/f324aefe-58e1-4638-915b-8cc658d13c5d-registration-dir\") pod \"csi-hostpathplugin-87rgh\" (UID: \"f324aefe-58e1-4638-915b-8cc658d13c5d\") " pod="hostpath-provisioner/csi-hostpathplugin-87rgh"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.402805 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/f324aefe-58e1-4638-915b-8cc658d13c5d-plugins-dir\") pod \"csi-hostpathplugin-87rgh\" (UID: \"f324aefe-58e1-4638-915b-8cc658d13c5d\") " pod="hostpath-provisioner/csi-hostpathplugin-87rgh"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.402823 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4d206d4a-17c1-4750-9984-35ff08f92e2e-cert\") pod \"ingress-canary-qkxnl\" (UID: \"4d206d4a-17c1-4750-9984-35ff08f92e2e\") " pod="openshift-ingress-canary/ingress-canary-qkxnl"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.402848 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88rfr\" (UniqueName: \"kubernetes.io/projected/514794d8-40b4-4938-9d15-aa39764ac45c-kube-api-access-88rfr\") pod \"dns-default-jwp2r\" (UID: \"514794d8-40b4-4938-9d15-aa39764ac45c\") " pod="openshift-dns/dns-default-jwp2r"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.402875 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/514794d8-40b4-4938-9d15-aa39764ac45c-metrics-tls\") pod \"dns-default-jwp2r\" (UID: \"514794d8-40b4-4938-9d15-aa39764ac45c\") " pod="openshift-dns/dns-default-jwp2r"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.402895 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pw5tg\" (UniqueName: \"kubernetes.io/projected/3c731d5d-a428-4eca-aaf3-441b5048a034-kube-api-access-pw5tg\") pod \"machine-config-server-5xz74\" (UID: \"3c731d5d-a428-4eca-aaf3-441b5048a034\") " pod="openshift-machine-config-operator/machine-config-server-5xz74"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.402915 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/f324aefe-58e1-4638-915b-8cc658d13c5d-csi-data-dir\") pod \"csi-hostpathplugin-87rgh\" (UID: \"f324aefe-58e1-4638-915b-8cc658d13c5d\") " pod="hostpath-provisioner/csi-hostpathplugin-87rgh"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.402930 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/f324aefe-58e1-4638-915b-8cc658d13c5d-socket-dir\") pod \"csi-hostpathplugin-87rgh\" (UID: \"f324aefe-58e1-4638-915b-8cc658d13c5d\") " pod="hostpath-provisioner/csi-hostpathplugin-87rgh"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.402958 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zddsw\" (UniqueName: \"kubernetes.io/projected/f324aefe-58e1-4638-915b-8cc658d13c5d-kube-api-access-zddsw\") pod \"csi-hostpathplugin-87rgh\" (UID: \"f324aefe-58e1-4638-915b-8cc658d13c5d\") " pod="hostpath-provisioner/csi-hostpathplugin-87rgh"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.403537 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/f324aefe-58e1-4638-915b-8cc658d13c5d-csi-data-dir\") pod \"csi-hostpathplugin-87rgh\" (UID: \"f324aefe-58e1-4638-915b-8cc658d13c5d\") " pod="hostpath-provisioner/csi-hostpathplugin-87rgh"
Dec 01 06:45:26 crc kubenswrapper[4632]: E1201 06:45:26.403729 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:26.903719836 +0000 UTC m=+136.468732809 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.403905 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/f324aefe-58e1-4638-915b-8cc658d13c5d-socket-dir\") pod \"csi-hostpathplugin-87rgh\" (UID: \"f324aefe-58e1-4638-915b-8cc658d13c5d\") " pod="hostpath-provisioner/csi-hostpathplugin-87rgh"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.403954 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/f324aefe-58e1-4638-915b-8cc658d13c5d-plugins-dir\") pod \"csi-hostpathplugin-87rgh\" (UID: \"f324aefe-58e1-4638-915b-8cc658d13c5d\") " pod="hostpath-provisioner/csi-hostpathplugin-87rgh"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.403978 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/f324aefe-58e1-4638-915b-8cc658d13c5d-mountpoint-dir\") pod \"csi-hostpathplugin-87rgh\" (UID: \"f324aefe-58e1-4638-915b-8cc658d13c5d\") " pod="hostpath-provisioner/csi-hostpathplugin-87rgh"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.403990 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/f324aefe-58e1-4638-915b-8cc658d13c5d-registration-dir\") pod \"csi-hostpathplugin-87rgh\" (UID: \"f324aefe-58e1-4638-915b-8cc658d13c5d\") " pod="hostpath-provisioner/csi-hostpathplugin-87rgh"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.406114 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/3c731d5d-a428-4eca-aaf3-441b5048a034-certs\") pod \"machine-config-server-5xz74\" (UID: \"3c731d5d-a428-4eca-aaf3-441b5048a034\") " pod="openshift-machine-config-operator/machine-config-server-5xz74"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.406192 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4d206d4a-17c1-4750-9984-35ff08f92e2e-cert\") pod \"ingress-canary-qkxnl\" (UID: \"4d206d4a-17c1-4750-9984-35ff08f92e2e\") " pod="openshift-ingress-canary/ingress-canary-qkxnl"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.406583 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/514794d8-40b4-4938-9d15-aa39764ac45c-config-volume\") pod \"dns-default-jwp2r\" (UID: \"514794d8-40b4-4938-9d15-aa39764ac45c\") " pod="openshift-dns/dns-default-jwp2r"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.407808 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/3c731d5d-a428-4eca-aaf3-441b5048a034-node-bootstrap-token\") pod \"machine-config-server-5xz74\" (UID: \"3c731d5d-a428-4eca-aaf3-441b5048a034\") " pod="openshift-machine-config-operator/machine-config-server-5xz74"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.408250 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/514794d8-40b4-4938-9d15-aa39764ac45c-metrics-tls\") pod \"dns-default-jwp2r\" (UID: \"514794d8-40b4-4938-9d15-aa39764ac45c\") " pod="openshift-dns/dns-default-jwp2r"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.420075 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-nr8wb"]
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.421241 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgs24\" (UniqueName: \"kubernetes.io/projected/b8a033c5-0e8d-4f09-8bb8-5768d1a370a4-kube-api-access-rgs24\") pod \"openshift-controller-manager-operator-756b6f6bc6-xpv92\" (UID: \"b8a033c5-0e8d-4f09-8bb8-5768d1a370a4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xpv92"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.440090 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b695h\" (UniqueName: \"kubernetes.io/projected/1114abcc-5526-4ef9-8e98-8fe888a77072-kube-api-access-b695h\") pod \"multus-admission-controller-857f4d67dd-wmm47\" (UID: \"1114abcc-5526-4ef9-8e98-8fe888a77072\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-wmm47"
Dec 01 06:45:26 crc kubenswrapper[4632]: W1201 06:45:26.451817 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod859a757f_9846_4176_8afd_b0e6b818563f.slice/crio-e2abeb714cfc5a25eb1f481497ec7516221b88e48989680b39d68448f264e9b2 WatchSource:0}: Error finding container e2abeb714cfc5a25eb1f481497ec7516221b88e48989680b39d68448f264e9b2: Status 404 returned error can't find the container with id e2abeb714cfc5a25eb1f481497ec7516221b88e48989680b39d68448f264e9b2
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.460311 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvb4h\" (UniqueName: \"kubernetes.io/projected/93c3de01-c9ea-4c65-8d6d-5cce60e89f5d-kube-api-access-zvb4h\") pod \"packageserver-d55dfcdfc-58hth\" (UID: \"93c3de01-c9ea-4c65-8d6d-5cce60e89f5d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-58hth"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.464751 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-lrlpn"]
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.479809 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvjvx\" (UniqueName: \"kubernetes.io/projected/93b6b658-7ff9-48ad-bd7b-6518ff4cdd5c-kube-api-access-vvjvx\") pod \"machine-approver-56656f9798-ttl89\" (UID: \"93b6b658-7ff9-48ad-bd7b-6518ff4cdd5c\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ttl89"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.490736 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-hf9tg"]
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.500281 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c439cd75-04a3-4881-8878-44f0a91f2088-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-v966p\" (UID: \"c439cd75-04a3-4881-8878-44f0a91f2088\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v966p"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.504173 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:45:26 crc kubenswrapper[4632]: W1201 06:45:26.504277 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4318c7d_d948_4676_a3e4_885ffe2ae1ae.slice/crio-a6fa1fe37f0590e5c58c7dbb2656ecb791e71049cd53b04210e1341fb8620a9f WatchSource:0}: Error finding container a6fa1fe37f0590e5c58c7dbb2656ecb791e71049cd53b04210e1341fb8620a9f: Status 404 returned error can't find the container with id a6fa1fe37f0590e5c58c7dbb2656ecb791e71049cd53b04210e1341fb8620a9f
Dec 01 06:45:26 crc kubenswrapper[4632]: E1201 06:45:26.504467 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:27.004450384 +0000 UTC m=+136.569463357 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.504649 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2"
Dec 01 06:45:26 crc kubenswrapper[4632]: E1201 06:45:26.505088 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:27.005079602 +0000 UTC m=+136.570092574 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.519378 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5q72b\" (UniqueName: \"kubernetes.io/projected/887e4a51-f7e3-4e2d-aa8e-10cd0793898b-kube-api-access-5q72b\") pod \"authentication-operator-69f744f599-nqvnc\" (UID: \"887e4a51-f7e3-4e2d-aa8e-10cd0793898b\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-nqvnc"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.540230 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kb846\" (UniqueName: \"kubernetes.io/projected/3681c29d-32b7-4037-bd1c-18c2733173bc-kube-api-access-kb846\") pod \"oauth-openshift-558db77b4-lw8pk\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.545938 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ttl89"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.550927 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xpv92"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.561205 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bk2sv\" (UniqueName: \"kubernetes.io/projected/3c63f605-5c81-44a5-b1f5-448b6f87c7a4-kube-api-access-bk2sv\") pod \"marketplace-operator-79b997595-6vgtg\" (UID: \"3c63f605-5c81-44a5-b1f5-448b6f87c7a4\") " pod="openshift-marketplace/marketplace-operator-79b997595-6vgtg"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.583033 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-g99wc"]
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.589007 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8shzv\" (UniqueName: \"kubernetes.io/projected/2181e6e0-5901-48a6-b44f-621e41a161d5-kube-api-access-8shzv\") pod \"downloads-7954f5f757-wchfw\" (UID: \"2181e6e0-5901-48a6-b44f-621e41a161d5\") " pod="openshift-console/downloads-7954f5f757-wchfw"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.598955 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qwhw\" (UniqueName: \"kubernetes.io/projected/6be1d7b9-4a80-4b57-a4ac-71cf05c921f9-kube-api-access-7qwhw\") pod \"route-controller-manager-6576b87f9c-dxpwr\" (UID: \"6be1d7b9-4a80-4b57-a4ac-71cf05c921f9\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr"
Dec 01 06:45:26 crc kubenswrapper[4632]: W1201 06:45:26.602165 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode90e3510_a941_487a_af96_639fdc977fbb.slice/crio-a052b1ebf84428bbd1aff0ba0c524db263de28a23d9ab1f6a9d31452fdb8c342 WatchSource:0}: Error finding container a052b1ebf84428bbd1aff0ba0c524db263de28a23d9ab1f6a9d31452fdb8c342: Status 404 returned error can't find the container with id a052b1ebf84428bbd1aff0ba0c524db263de28a23d9ab1f6a9d31452fdb8c342
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.604061 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xwqwc"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.605628 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:45:26 crc kubenswrapper[4632]: E1201 06:45:26.605757 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:27.105738444 +0000 UTC m=+136.670751417 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.606053 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2"
Dec 01 06:45:26 crc kubenswrapper[4632]: E1201 06:45:26.606373 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:27.106346722 +0000 UTC m=+136.671359695 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.608042 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v966p"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.613558 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-wmm47"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.619238 4632 util.go:30] "No sandbox for pod can be found.
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.624375 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7g4lz\" (UniqueName: \"kubernetes.io/projected/8a77399a-5697-4f2d-8ec5-8e05106a356d-kube-api-access-7g4lz\") pod \"openshift-config-operator-7777fb866f-gkwgl\" (UID: \"8a77399a-5697-4f2d-8ec5-8e05106a356d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-gkwgl"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.636361 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-58hth"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.639370 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7c7cadd7-3773-48bd-a595-631a38d4d693-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-jrfmt\" (UID: \"7c7cadd7-3773-48bd-a595-631a38d4d693\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jrfmt"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.647052 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6vgtg"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.663580 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzfzr\" (UniqueName: \"kubernetes.io/projected/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-kube-api-access-xzfzr\") pod \"console-f9d7485db-52x8r\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " pod="openshift-console/console-f9d7485db-52x8r"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.684556 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjjsv\" (UniqueName: \"kubernetes.io/projected/cb6f81ed-0172-4f32-97cb-b2911644523a-kube-api-access-cjjsv\") pod \"ingress-operator-5b745b69d9-j6rr8\" (UID: \"cb6f81ed-0172-4f32-97cb-b2911644523a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6rr8"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.687519 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.692850 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xpv92"]
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.706714 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:45:26 crc kubenswrapper[4632]: E1201 06:45:26.706957 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:27.206939329 +0000 UTC m=+136.771952302 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.707203 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2"
Dec 01 06:45:26 crc kubenswrapper[4632]: E1201 06:45:26.707604 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:27.207593304 +0000 UTC m=+136.772606276 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.712041 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cb6f81ed-0172-4f32-97cb-b2911644523a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-j6rr8\" (UID: \"cb6f81ed-0172-4f32-97cb-b2911644523a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6rr8"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.747594 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlqcg\" (UniqueName: \"kubernetes.io/projected/39126103-ae29-4563-af2f-cd549bb98ebb-kube-api-access-rlqcg\") pod \"cluster-image-registry-operator-dc59b4c8b-cn2l5\" (UID: \"39126103-ae29-4563-af2f-cd549bb98ebb\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cn2l5"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.748643 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-4gjnq"]
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.760445 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jgmv4"]
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.764999 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgnf4\" (UniqueName: \"kubernetes.io/projected/e354731f-7ed1-47e3-8d64-7d55f1613100-kube-api-access-hgnf4\") pod \"control-plane-machine-set-operator-78cbb6b69f-mxcl4\" (UID: \"e354731f-7ed1-47e3-8d64-7d55f1613100\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mxcl4"
Dec 01 06:45:26 crc kubenswrapper[4632]: W1201 06:45:26.772394 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf109c96_3384_4e9d_9118_876815fe9c16.slice/crio-2f0ffb4a30f9312e6ee1f437114e42f97f86e14ad2ef4e013ef1624316e2a9f1 WatchSource:0}: Error finding container 2f0ffb4a30f9312e6ee1f437114e42f97f86e14ad2ef4e013ef1624316e2a9f1: Status 404 returned error can't find the container with id 2f0ffb4a30f9312e6ee1f437114e42f97f86e14ad2ef4e013ef1624316e2a9f1
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.776037 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk"
Dec 01 06:45:26 crc kubenswrapper[4632]: W1201 06:45:26.781846 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc1c0a7e7_31d1_4dc7_a3b2_9d38c60075c1.slice/crio-5c101debc95610952bfcfdafd0ff74694c0970122151a46e869827ead15d9b3d WatchSource:0}: Error finding container 5c101debc95610952bfcfdafd0ff74694c0970122151a46e869827ead15d9b3d: Status 404 returned error can't find the container with id 5c101debc95610952bfcfdafd0ff74694c0970122151a46e869827ead15d9b3d
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.785496 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25r9n\" (UniqueName: \"kubernetes.io/projected/ae14b3fb-8e47-4b2c-af2a-689240f1dae9-kube-api-access-25r9n\") pod \"cluster-samples-operator-665b6dd947-m2fgg\" (UID: \"ae14b3fb-8e47-4b2c-af2a-689240f1dae9\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-m2fgg"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.796676 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-nqvnc"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.800691 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ld2jx\" (UniqueName: \"kubernetes.io/projected/7fa3078d-93ff-452f-a746-010c568af171-kube-api-access-ld2jx\") pod \"apiserver-7bbb656c7d-lp6wn\" (UID: \"7fa3078d-93ff-452f-a746-010c568af171\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.800869 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cn2l5"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.806577 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-wchfw"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.808441 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:45:26 crc kubenswrapper[4632]: E1201 06:45:26.808781 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:27.308760395 +0000 UTC m=+136.873773368 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.808995 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2"
Dec 01 06:45:26 crc kubenswrapper[4632]: E1201 06:45:26.809287 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:27.309279325 +0000 UTC m=+136.874292299 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.819812 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fn9cs\" (UniqueName: \"kubernetes.io/projected/5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3-kube-api-access-fn9cs\") pod \"etcd-operator-b45778765-789rs\" (UID: \"5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3\") " pod="openshift-etcd-operator/etcd-operator-b45778765-789rs"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.828340 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-52x8r"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.836940 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.840508 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gkwgl"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.847053 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-bound-sa-token\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.856151 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jrfmt"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.863955 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6rr8"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.864515 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rh9d7\" (UniqueName: \"kubernetes.io/projected/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-kube-api-access-rh9d7\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.885985 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvqlf\" (UniqueName: \"kubernetes.io/projected/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-kube-api-access-cvqlf\") pod \"controller-manager-879f6c89f-d8bfn\" (UID: \"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa\") " pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.901118 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjqr8\" (UniqueName: \"kubernetes.io/projected/4bdc3484-8840-4d88-83c9-32ba1da693fa-kube-api-access-gjqr8\") pod \"machine-config-controller-84d6567774-2spdj\" (UID: \"4bdc3484-8840-4d88-83c9-32ba1da693fa\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2spdj"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.909872 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:45:26 crc kubenswrapper[4632]: E1201 06:45:26.910044 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:27.409994464 +0000 UTC m=+136.975007437 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.910216 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2"
Dec 01 06:45:26 crc kubenswrapper[4632]: E1201 06:45:26.911277 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:27.411266656 +0000 UTC m=+136.976279629 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.930558 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sw6x9\" (UniqueName: \"kubernetes.io/projected/978db707-ffa7-491e-b032-59db1b5aa9c9-kube-api-access-sw6x9\") pod \"catalog-operator-68c6474976-7r8sp\" (UID: \"978db707-ffa7-491e-b032-59db1b5aa9c9\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7r8sp"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.942470 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mxcl4"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.943139 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q89k4\" (UniqueName: \"kubernetes.io/projected/dab5433a-456e-4006-a05d-a2f04ebe1330-kube-api-access-q89k4\") pod \"collect-profiles-29409525-ndjg6\" (UID: \"dab5433a-456e-4006-a05d-a2f04ebe1330\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-ndjg6"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.970437 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-789rs"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.976665 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-ndjg6"
Dec 01 06:45:26 crc kubenswrapper[4632]: I1201 06:45:26.988460 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/c4a7f54b-f039-4564-a917-b643351932f5-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-br5vj\" (UID: \"c4a7f54b-f039-4564-a917-b643351932f5\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-br5vj"
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.007799 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-58hth"]
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.009620 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-m2fgg"
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.016052 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:45:27 crc kubenswrapper[4632]: E1201 06:45:27.016403 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:27.51638061 +0000 UTC m=+137.081393584 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.016616 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2"
Dec 01 06:45:27 crc kubenswrapper[4632]: E1201 06:45:27.016979 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:27.51696852 +0000 UTC m=+137.081981493 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.020227 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grk9k\" (UniqueName: \"kubernetes.io/projected/192cea7d-fce6-4d2f-8fc7-9b0ed2be15fe-kube-api-access-grk9k\") pod \"console-operator-58897d9998-d9ddv\" (UID: \"192cea7d-fce6-4d2f-8fc7-9b0ed2be15fe\") " pod="openshift-console-operator/console-operator-58897d9998-d9ddv"
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.031415 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dq5gf\" (UniqueName: \"kubernetes.io/projected/7a1eaf03-bfc0-4185-ad22-55e6ef75dae5-kube-api-access-dq5gf\") pod \"service-ca-operator-777779d784-kmxd4\" (UID: \"7a1eaf03-bfc0-4185-ad22-55e6ef75dae5\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-kmxd4"
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.044270 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v966p"]
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.050029 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xwqwc"]
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.050243 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72vqw\" (UniqueName: \"kubernetes.io/projected/8906a9ad-be53-4475-8b99-c2895ff794fa-kube-api-access-72vqw\") pod \"router-default-5444994796-j4hgc\" (UID: \"8906a9ad-be53-4475-8b99-c2895ff794fa\") " pod="openshift-ingress/router-default-5444994796-j4hgc"
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.051962 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zddsw\" (UniqueName: \"kubernetes.io/projected/f324aefe-58e1-4638-915b-8cc658d13c5d-kube-api-access-zddsw\") pod \"csi-hostpathplugin-87rgh\" (UID: \"f324aefe-58e1-4638-915b-8cc658d13c5d\") " pod="hostpath-provisioner/csi-hostpathplugin-87rgh"
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.062776 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xc56k\" (UniqueName: \"kubernetes.io/projected/4d206d4a-17c1-4750-9984-35ff08f92e2e-kube-api-access-xc56k\") pod \"ingress-canary-qkxnl\" (UID: \"4d206d4a-17c1-4750-9984-35ff08f92e2e\") " pod="openshift-ingress-canary/ingress-canary-qkxnl"
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.092154 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pw5tg\" (UniqueName: \"kubernetes.io/projected/3c731d5d-a428-4eca-aaf3-441b5048a034-kube-api-access-pw5tg\") pod \"machine-config-server-5xz74\" (UID: \"3c731d5d-a428-4eca-aaf3-441b5048a034\") " pod="openshift-machine-config-operator/machine-config-server-5xz74"
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.102874 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88rfr\" (UniqueName: \"kubernetes.io/projected/514794d8-40b4-4938-9d15-aa39764ac45c-kube-api-access-88rfr\") pod \"dns-default-jwp2r\" (UID: \"514794d8-40b4-4938-9d15-aa39764ac45c\") " pod="openshift-dns/dns-default-jwp2r"
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.111928 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-d9ddv"
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.113532 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vtnjq"]
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.117846 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn"
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.118129 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:45:27 crc kubenswrapper[4632]: E1201 06:45:27.118406 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:27.618387648 +0000 UTC m=+137.183400621 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.152669 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-wmm47"]
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.155531 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6vgtg"]
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.171270 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jdgfp" event={"ID":"d2cfa811-e0b5-403d-b7cb-fdfc589865d4","Type":"ContainerStarted","Data":"a685b31153ee3574938a33835b2bba00ae32560036bef49bfc0179910af53dec"}
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.171315 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jdgfp" event={"ID":"d2cfa811-e0b5-403d-b7cb-fdfc589865d4","Type":"ContainerStarted","Data":"054238f2dd75b31b42c0f8e7ecb28e60ae5b51d5ec10dc2af54b961fdc39e7f4"}
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.181775 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-4gjnq" event={"ID":"af109c96-3384-4e9d-9118-876815fe9c16","Type":"ContainerStarted","Data":"71de51adad4f2c9ce37dd1057f59247263841042d65f663fe488c8cdd1068f3c"}
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.181806 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-4gjnq" event={"ID":"af109c96-3384-4e9d-9118-876815fe9c16","Type":"ContainerStarted","Data":"2f0ffb4a30f9312e6ee1f437114e42f97f86e14ad2ef4e013ef1624316e2a9f1"}
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.184690 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr"]
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.186729 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2spdj"
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.187865 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-hf9tg" event={"ID":"f4318c7d-d948-4676-a3e4-885ffe2ae1ae","Type":"ContainerStarted","Data":"049e48a21c1cbcfec175287f9e2815f1c7c22e4cfde8dbab5f25aaee8e79fb4d"}
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.187899 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-hf9tg" event={"ID":"f4318c7d-d948-4676-a3e4-885ffe2ae1ae","Type":"ContainerStarted","Data":"9756e00f0127f4f6fee4cb709ec6f144fd0203990f0b53c93267ab1905b9a440"}
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.187919 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-hf9tg" event={"ID":"f4318c7d-d948-4676-a3e4-885ffe2ae1ae","Type":"ContainerStarted","Data":"a6fa1fe37f0590e5c58c7dbb2656ecb791e71049cd53b04210e1341fb8620a9f"}
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.190699 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xpv92" event={"ID":"b8a033c5-0e8d-4f09-8bb8-5768d1a370a4","Type":"ContainerStarted","Data":"a8ff4aa3e33f837cd761b2b403aa875bb24403df42fe7e882fbf2b0fdba2023b"}
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.190726 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xpv92" event={"ID":"b8a033c5-0e8d-4f09-8bb8-5768d1a370a4","Type":"ContainerStarted","Data":"d21dc69d1fff4d9804966588de26bd51c80a1cd6d3583034fa4d9f78ad307845"}
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.193375 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-58hth" event={"ID":"93c3de01-c9ea-4c65-8d6d-5cce60e89f5d","Type":"ContainerStarted","Data":"26b539ab7fb41f5220f2f1168b763d35b36c1103e5aa6f971620d1da14fddbbb"}
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.194598 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v966p" event={"ID":"c439cd75-04a3-4881-8878-44f0a91f2088","Type":"ContainerStarted","Data":"81465d8549696f0b03b1ee6e55cd57a3596a77f5316e3954b0d1616134ff27fb"}
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.196784 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-g99wc" event={"ID":"e90e3510-a941-487a-af96-639fdc977fbb","Type":"ContainerStarted","Data":"5c192cfd7fafd5f9873379c5b0139c3c4f43b91547d1ca31f28db9f788be2a78"}
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.196816 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-g99wc" event={"ID":"e90e3510-a941-487a-af96-639fdc977fbb","Type":"ContainerStarted","Data":"97d96b440c79d26a4b13ceb9a8db937b421edcc67aaa0f4469562e84183acce2"}
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.196826 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-g99wc" event={"ID":"e90e3510-a941-487a-af96-639fdc977fbb","Type":"ContainerStarted","Data":"a052b1ebf84428bbd1aff0ba0c524db263de28a23d9ab1f6a9d31452fdb8c342"}
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.198995 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nr8wb" event={"ID":"859a757f-9846-4176-8afd-b0e6b818563f","Type":"ContainerStarted","Data":"5ec93104e431fb92379b4228e978dd801d3f55ac04b0a9ef8ed6c74ad14517d8"}
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.199029 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nr8wb" event={"ID":"859a757f-9846-4176-8afd-b0e6b818563f","Type":"ContainerStarted","Data":"e09673755a1a97c148547f31e45a42f8096120b444dfba0983b3ed9ad5bb0c06"}
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.199040 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nr8wb" event={"ID":"859a757f-9846-4176-8afd-b0e6b818563f","Type":"ContainerStarted","Data":"e2abeb714cfc5a25eb1f481497ec7516221b88e48989680b39d68448f264e9b2"}
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.200661 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xwqwc" event={"ID":"0baef212-8f33-4c4f-b5e2-af5cad0333d5","Type":"ContainerStarted","Data":"87c28e210c9f08f408b10669c6f87960ac30c4348478b96b7e382dbd8e583617"}
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.204412 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ttl89" event={"ID":"93b6b658-7ff9-48ad-bd7b-6518ff4cdd5c","Type":"ContainerStarted","Data":"9ccaf59cf4c34a8ea6cc62b80eefb2049dcd2259a70b9b47bd81587395e8d5f5"}
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.204446 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ttl89" event={"ID":"93b6b658-7ff9-48ad-bd7b-6518ff4cdd5c","Type":"ContainerStarted","Data":"7c468333cfb7773c50fe6102f008de130dfb6b5843c5186f7231c4d309653953"}
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.207537 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lrlpn" event={"ID":"2bcb7288-f87a-444d-b3f7-7cfa98d24a95","Type":"ContainerStarted","Data":"7f28d60ba5e2ba8ad7212dcb3b246055a4c000527bc1de3acb380830f832e206"}
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.207578 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lrlpn" event={"ID":"2bcb7288-f87a-444d-b3f7-7cfa98d24a95","Type":"ContainerStarted","Data":"0c553496b3f810033fb0f6f9cd45279aaf87e7e4dada3dd813ccc2ff49c9af75"}
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.207590 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lrlpn" event={"ID":"2bcb7288-f87a-444d-b3f7-7cfa98d24a95","Type":"ContainerStarted","Data":"7f1053cc8505fa3107bc0861c87b3658859b5db005c79a344a22c2234ff4ad93"}
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.210714 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jgmv4" event={"ID":"c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1","Type":"ContainerStarted","Data":"3c69525a3d086c365a64bce48f2c73af09159d882791a132ba03735af78f1756"}
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.210746 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jgmv4" event={"ID":"c1c0a7e7-31d1-4dc7-a3b2-9d38c60075c1","Type":"ContainerStarted","Data":"5c101debc95610952bfcfdafd0ff74694c0970122151a46e869827ead15d9b3d"}
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.212390 4632 generic.go:334] "Generic (PLEG): container finished" podID="336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f" containerID="81bc65639a50b7bc778c02440c363456ca948260346ffe5d60892765cdf1c6bf" exitCode=0
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.212426 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-kxph4" event={"ID":"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f","Type":"ContainerDied","Data":"81bc65639a50b7bc778c02440c363456ca948260346ffe5d60892765cdf1c6bf"}
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.212442 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-kxph4" event={"ID":"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f","Type":"ContainerStarted","Data":"3c5a7bdd72dee2ecf8cae57d6f870dd860ab57523c1dff5392fb5af39bcb22d1"}
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.219065 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2"
Dec 01 06:45:27 crc kubenswrapper[4632]: E1201 06:45:27.219472 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:27.719459029 +0000 UTC m=+137.284472002 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.226078 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7r8sp"
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.231163 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-kmxd4"
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.253774 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-br5vj"
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.288814 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-j4hgc"
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.294333 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-qkxnl"
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.308118 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-87rgh"
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.312922 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-5xz74"
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.317604 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-jwp2r"
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.319718 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:45:27 crc kubenswrapper[4632]: E1201 06:45:27.320596 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:27.820571166 +0000 UTC m=+137.385584140 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.421178 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2"
Dec 01 06:45:27 crc kubenswrapper[4632]: E1201 06:45:27.421534 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:27.921520156 +0000 UTC m=+137.486533129 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.532381 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:45:27 crc kubenswrapper[4632]: E1201 06:45:27.533286 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:28.033262145 +0000 UTC m=+137.598275118 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.552586 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lw8pk"]
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.634935 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2"
Dec 01 06:45:27 crc kubenswrapper[4632]: E1201 06:45:27.635255 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:28.135245729 +0000 UTC m=+137.700258703 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.737662 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:45:27 crc kubenswrapper[4632]: E1201 06:45:27.738553 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:28.238530419 +0000 UTC m=+137.803543392 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.843141 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2"
Dec 01 06:45:27 crc kubenswrapper[4632]: E1201 06:45:27.843612 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:28.343602264 +0000 UTC m=+137.908615237 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.875744 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-nqvnc"]
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.882064 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-gkwgl"]
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.905715 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn"]
Dec 01 06:45:27 crc kubenswrapper[4632]: W1201 06:45:27.912702 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8a77399a_5697_4f2d_8ec5_8e05106a356d.slice/crio-489163e555d25b980a61f10a1960daba1909ff9f7f0eece4dbec906dec5dc5e9 WatchSource:0}: Error finding container 489163e555d25b980a61f10a1960daba1909ff9f7f0eece4dbec906dec5dc5e9: Status 404 returned error can't find the container with id 489163e555d25b980a61f10a1960daba1909ff9f7f0eece4dbec906dec5dc5e9
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.914446 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jrfmt"]
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.916943 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-wchfw"]
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.924292 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-j6rr8"]
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.944454 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:45:27 crc kubenswrapper[4632]: E1201 06:45:27.946303 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:28.446285848 +0000 UTC m=+138.011298822 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.946730 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2"
Dec 01 06:45:27 crc kubenswrapper[4632]: E1201 06:45:27.946942 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:28.446934083 +0000 UTC m=+138.011947056 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:45:27 crc kubenswrapper[4632]: I1201 06:45:27.952999 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cn2l5"]
Dec 01 06:45:27 crc kubenswrapper[4632]: W1201 06:45:27.969119 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7c7cadd7_3773_48bd_a595_631a38d4d693.slice/crio-aed919d527c58187e2da959ced69fface7e6d57134f8368f9b0fb501ab771b15 WatchSource:0}: Error finding container aed919d527c58187e2da959ced69fface7e6d57134f8368f9b0fb501ab771b15: Status 404 returned error can't find the container with id aed919d527c58187e2da959ced69fface7e6d57134f8368f9b0fb501ab771b15
Dec 01 06:45:27 crc kubenswrapper[4632]: W1201 06:45:27.993720 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod39126103_ae29_4563_af2f_cd549bb98ebb.slice/crio-4b1df4d71f0badb336d0746ce4f03101173f26076632dd7131dc3d458dde666c WatchSource:0}: Error finding container 4b1df4d71f0badb336d0746ce4f03101173f26076632dd7131dc3d458dde666c: Status 404 returned error can't find the container with id 4b1df4d71f0badb336d0746ce4f03101173f26076632dd7131dc3d458dde666c
Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.047397 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:45:28 crc kubenswrapper[4632]: E1201 06:45:28.047554 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:28.547528203 +0000 UTC m=+138.112541176 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.047937 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2"
Dec 01 06:45:28 crc kubenswrapper[4632]: E1201 06:45:28.048222 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:28.548209048 +0000 UTC m=+138.113222022 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.072698 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-789rs"]
Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.082993 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-52x8r"]
Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.095731 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-m2fgg"]
Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.149058 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 01 06:45:28 crc kubenswrapper[4632]: E1201 06:45:28.149548 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:28.649529339 +0000 UTC m=+138.214542312 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.200722 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-qkxnl"] Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.251136 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:28 crc kubenswrapper[4632]: E1201 06:45:28.251745 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:28.751731436 +0000 UTC m=+138.316744408 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.265490 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-br5vj"] Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.265572 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7r8sp"] Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.270618 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-kmxd4"] Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.298453 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6rr8" event={"ID":"cb6f81ed-0172-4f32-97cb-b2911644523a","Type":"ContainerStarted","Data":"de36e55689bec4d9894850fd9a15b8dd0b7b158216d389852d27cfed1aee6429"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.323426 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-d8bfn"] Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.323831 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-2spdj"] Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.332918 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409525-ndjg6"] Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.345511 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ttl89" event={"ID":"93b6b658-7ff9-48ad-bd7b-6518ff4cdd5c","Type":"ContainerStarted","Data":"0d2dd1091b020e7ab4f889a6b3f460fc154ce135c0a7c2840e74e16a09b6f963"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.352049 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.356572 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mxcl4"] Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.358983 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cn2l5" event={"ID":"39126103-ae29-4563-af2f-cd549bb98ebb","Type":"ContainerStarted","Data":"4b1df4d71f0badb336d0746ce4f03101173f26076632dd7131dc3d458dde666c"} Dec 01 06:45:28 crc kubenswrapper[4632]: E1201 06:45:28.366693 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:28.866672014 +0000 UTC m=+138.431684987 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.377516 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-wchfw" event={"ID":"2181e6e0-5901-48a6-b44f-621e41a161d5","Type":"ContainerStarted","Data":"dd829fc6a4c64603e6e60f85ff2ff27fdbc46f377e2193d29c347de6bae7f4f0"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.424418 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" event={"ID":"3681c29d-32b7-4037-bd1c-18c2733173bc","Type":"ContainerStarted","Data":"66eb0ca573028c0b3a692898f11082092d4d7790a4f79ed0525ce28e74522731"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.424688 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" event={"ID":"3681c29d-32b7-4037-bd1c-18c2733173bc","Type":"ContainerStarted","Data":"fa79ce6097cd508c08b59552ee6cfbb6752fe6aee34d2b367f655107bd13e75e"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.425931 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.428846 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-xpv92" podStartSLOduration=118.428830981 podStartE2EDuration="1m58.428830981s" podCreationTimestamp="2025-12-01 06:43:30 +0000 
UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:28.426111056 +0000 UTC m=+137.991124029" watchObservedRunningTime="2025-12-01 06:45:28.428830981 +0000 UTC m=+137.993843954" Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.437731 4632 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-lw8pk container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.9:6443/healthz\": dial tcp 10.217.0.9:6443: connect: connection refused" start-of-body= Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.437768 4632 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" podUID="3681c29d-32b7-4037-bd1c-18c2733173bc" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.9:6443/healthz\": dial tcp 10.217.0.9:6443: connect: connection refused" Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.438790 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-d9ddv"] Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.453613 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-87rgh"] Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.454201 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:28 crc kubenswrapper[4632]: E1201 06:45:28.455110 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:28.955100382 +0000 UTC m=+138.520113355 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.480531 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-5xz74" event={"ID":"3c731d5d-a428-4eca-aaf3-441b5048a034","Type":"ContainerStarted","Data":"01b274884a95bd0a1e957393f8481acd36857ea8223f120d12f51984ceb8e268"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.480574 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-5xz74" event={"ID":"3c731d5d-a428-4eca-aaf3-441b5048a034","Type":"ContainerStarted","Data":"9465807bcd98ac2e96f834aded42a8b664810af5cfde88eb2f94217bfe18d452"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.491429 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-jwp2r"] Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.508543 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr" event={"ID":"6be1d7b9-4a80-4b57-a4ac-71cf05c921f9","Type":"ContainerStarted","Data":"66f14f53d731878626c93c616badfddd1afa0b8d2757d6afc5d58f14f054e56b"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.508586 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr" event={"ID":"6be1d7b9-4a80-4b57-a4ac-71cf05c921f9","Type":"ContainerStarted","Data":"2f0d7078458fca8dc2c85a5f8f902282ca814c99d73423455db4ece087118dbb"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.509469 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr" Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.550132 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-nr8wb" podStartSLOduration=118.550110218 podStartE2EDuration="1m58.550110218s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:28.548816065 +0000 UTC m=+138.113829048" watchObservedRunningTime="2025-12-01 06:45:28.550110218 +0000 UTC m=+138.115123191" Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.556485 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-58hth" event={"ID":"93c3de01-c9ea-4c65-8d6d-5cce60e89f5d","Type":"ContainerStarted","Data":"88ea507a684506f14052d3c8bb38b845547808edbb3ce4a13e7ccecb1f6124b8"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.556736 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-58hth" Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.564643 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:45:28 crc kubenswrapper[4632]: E1201 06:45:28.565551 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:29.065537367 +0000 UTC m=+138.630550340 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.568577 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v966p" event={"ID":"c439cd75-04a3-4881-8878-44f0a91f2088","Type":"ContainerStarted","Data":"f3ca7921fdb45a81e0c3015924a4adb9edbff4dbe7d303d27e67376541cad227"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.585742 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gkwgl" event={"ID":"8a77399a-5697-4f2d-8ec5-8e05106a356d","Type":"ContainerStarted","Data":"489163e555d25b980a61f10a1960daba1909ff9f7f0eece4dbec906dec5dc5e9"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.586188 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-4gjnq" podStartSLOduration=118.586175055 podStartE2EDuration="1m58.586175055s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:28.58596069 +0000 UTC m=+138.150973664" watchObservedRunningTime="2025-12-01 06:45:28.586175055 +0000 UTC m=+138.151188018" Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.587695 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jrfmt" event={"ID":"7c7cadd7-3773-48bd-a595-631a38d4d693","Type":"ContainerStarted","Data":"aed919d527c58187e2da959ced69fface7e6d57134f8368f9b0fb501ab771b15"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.589579 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr" Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.592419 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-58hth" Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.600401 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6vgtg" event={"ID":"3c63f605-5c81-44a5-b1f5-448b6f87c7a4","Type":"ContainerStarted","Data":"db06b44e6429880ad21d64e9fa7a727c048857a678ff2c2d87ab6c3c3750600e"} Dec 01 06:45:28 crc 
kubenswrapper[4632]: I1201 06:45:28.600611 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6vgtg" event={"ID":"3c63f605-5c81-44a5-b1f5-448b6f87c7a4","Type":"ContainerStarted","Data":"da4c064489155000e508e54ba2c05d0cb685f5640332b3a1fd1de62798443be0"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.600963 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-6vgtg" Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.607874 4632 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-6vgtg container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused" start-of-body= Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.608094 4632 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-6vgtg" podUID="3c63f605-5c81-44a5-b1f5-448b6f87c7a4" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused" Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.621372 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-jdgfp" podStartSLOduration=119.621343199 podStartE2EDuration="1m59.621343199s" podCreationTimestamp="2025-12-01 06:43:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:28.620667283 +0000 UTC m=+138.185680256" watchObservedRunningTime="2025-12-01 06:45:28.621343199 +0000 UTC m=+138.186356172" Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.627338 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vtnjq" event={"ID":"6d196250-dda3-49c7-8f2d-4e4888010659","Type":"ContainerStarted","Data":"8caab6f4ce2e39c0bb6e01fbe716d692ec39ab5f2caa68b7167af4f235609d10"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.627393 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vtnjq" event={"ID":"6d196250-dda3-49c7-8f2d-4e4888010659","Type":"ContainerStarted","Data":"d175bd1cb323c9fd51f64d64dd20489904928842dbc38e3d89495369e5987bf5"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.627403 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vtnjq" event={"ID":"6d196250-dda3-49c7-8f2d-4e4888010659","Type":"ContainerStarted","Data":"d9f188a8731279268ec766810a2b5657095318c4d24ca4dba4d96bcff412845e"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.627858 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vtnjq" Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.638862 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-j4hgc" event={"ID":"8906a9ad-be53-4475-8b99-c2895ff794fa","Type":"ContainerStarted","Data":"6427fe213e555edb543d7afe0ad1ce4bcfe627d326e61edfff3899e4a6c0c631"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.638895 4632 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-j4hgc" event={"ID":"8906a9ad-be53-4475-8b99-c2895ff794fa","Type":"ContainerStarted","Data":"60d363b6ca3e5950f7cc8338b1bf5a68b5c7031e6013b37bb29304479f94d69e"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.657749 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-52x8r" event={"ID":"ba580937-8b3b-404d-a9fe-3d4e014ce6b1","Type":"ContainerStarted","Data":"6307f6d97bd31ea884598fa1055cd83026aa404ea824f22d1edc47c85fca5ebf"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.666336 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:28 crc kubenswrapper[4632]: E1201 06:45:28.668023 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:29.168010215 +0000 UTC m=+138.733023177 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.673919 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-ttl89" podStartSLOduration=119.673906929 podStartE2EDuration="1m59.673906929s" podCreationTimestamp="2025-12-01 06:43:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:28.672712704 +0000 UTC m=+138.237725677" watchObservedRunningTime="2025-12-01 06:45:28.673906929 +0000 UTC m=+138.238919902" Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.702708 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn" event={"ID":"7fa3078d-93ff-452f-a746-010c568af171","Type":"ContainerStarted","Data":"29909f179096b2b2f4e76dc944208366d9129b69a0e1cdc94bfcde17729c5158"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.711537 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-wmm47" event={"ID":"1114abcc-5526-4ef9-8e98-8fe888a77072","Type":"ContainerStarted","Data":"f3aaa794d0c2d4c9afeed0383a6885a93693f93321243d49eda4ef92abb35444"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.718807 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-wmm47" event={"ID":"1114abcc-5526-4ef9-8e98-8fe888a77072","Type":"ContainerStarted","Data":"2aa637544b5303d0498435a1041432eb84b20fbc70762d89d4b5b98ef107631e"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.724961 4632 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xwqwc" event={"ID":"0baef212-8f33-4c4f-b5e2-af5cad0333d5","Type":"ContainerStarted","Data":"77802bcd12ecd27f6645cfe5516de44bb4634f2e905b131aa71c8ed08689b7ac"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.726254 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xwqwc" Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.743905 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-kxph4" event={"ID":"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f","Type":"ContainerStarted","Data":"b1eb25ca64efbeccbca46c7d0c2da992254f907f0f60d1a0169c8cc04e976214"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.744141 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-kxph4" event={"ID":"336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f","Type":"ContainerStarted","Data":"b8929d5b8ddd18cbea2e9695bb9314b9fe01df98b6c17b6f1ed92bfc8e433d4d"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.744118 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jgmv4" podStartSLOduration=118.744100456 podStartE2EDuration="1m58.744100456s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:28.740796468 +0000 UTC m=+138.305809441" watchObservedRunningTime="2025-12-01 06:45:28.744100456 +0000 UTC m=+138.309113430" Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.766146 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-789rs" event={"ID":"5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3","Type":"ContainerStarted","Data":"771b07d527edb97b6d8abd7ee296bbeda4c2c11da2788bdb8b4b23e5143a2b5d"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.766938 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xwqwc" Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.770021 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-nqvnc" event={"ID":"887e4a51-f7e3-4e2d-aa8e-10cd0793898b","Type":"ContainerStarted","Data":"00822de786f97e109483d84bf2d629d73df8ed1c0a73564e59f1be8216b0d129"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.770069 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-nqvnc" event={"ID":"887e4a51-f7e3-4e2d-aa8e-10cd0793898b","Type":"ContainerStarted","Data":"f29cfa047f2276eb41f66ca382ea4f117931c96904e4f7263e0e8d6a59d8fedd"} Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.772457 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:45:28 crc kubenswrapper[4632]: E1201 06:45:28.774178 4632 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:29.274164653 +0000 UTC m=+138.839177627 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.777513 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-hf9tg" podStartSLOduration=118.777498469 podStartE2EDuration="1m58.777498469s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:28.776722834 +0000 UTC m=+138.341735807" watchObservedRunningTime="2025-12-01 06:45:28.777498469 +0000 UTC m=+138.342511442" Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.857764 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-lrlpn" podStartSLOduration=118.857745389 podStartE2EDuration="1m58.857745389s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:28.857590105 +0000 UTC m=+138.422603079" watchObservedRunningTime="2025-12-01 06:45:28.857745389 +0000 UTC m=+138.422758362" Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.873944 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:28 crc kubenswrapper[4632]: E1201 06:45:28.878130 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:29.378117375 +0000 UTC m=+138.943130348 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.943716 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-g99wc" podStartSLOduration=118.943700298 podStartE2EDuration="1m58.943700298s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:28.943122135 +0000 UTC m=+138.508135109" watchObservedRunningTime="2025-12-01 06:45:28.943700298 +0000 UTC m=+138.508713271" Dec 01 06:45:28 crc kubenswrapper[4632]: I1201 06:45:28.975775 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:45:28 crc kubenswrapper[4632]: E1201 06:45:28.976146 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:29.47612959 +0000 UTC m=+139.041142563 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.025543 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-wmm47" podStartSLOduration=119.025525428 podStartE2EDuration="1m59.025525428s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:29.025413837 +0000 UTC m=+138.590426810" watchObservedRunningTime="2025-12-01 06:45:29.025525428 +0000 UTC m=+138.590538402" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.070497 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-6vgtg" podStartSLOduration=119.070483216 podStartE2EDuration="1m59.070483216s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:29.070000596 +0000 UTC m=+138.635013579" watchObservedRunningTime="2025-12-01 06:45:29.070483216 +0000 UTC m=+138.635496180" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.079277 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:29 crc kubenswrapper[4632]: E1201 06:45:29.079663 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:29.57964739 +0000 UTC m=+139.144660363 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.102822 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-v966p" podStartSLOduration=119.102809064 podStartE2EDuration="1m59.102809064s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:29.101413598 +0000 UTC m=+138.666426572" watchObservedRunningTime="2025-12-01 06:45:29.102809064 +0000 UTC m=+138.667822037" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.141848 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vtnjq" podStartSLOduration=119.141831565 podStartE2EDuration="1m59.141831565s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:29.141093892 +0000 UTC m=+138.706106865" watchObservedRunningTime="2025-12-01 06:45:29.141831565 +0000 UTC m=+138.706844538" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.180128 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:45:29 crc kubenswrapper[4632]: E1201 06:45:29.180556 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:29.680541145 +0000 UTC m=+139.245554119 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.181111 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-5xz74" podStartSLOduration=5.181096393 podStartE2EDuration="5.181096393s" podCreationTimestamp="2025-12-01 06:45:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:29.17923539 +0000 UTC m=+138.744248363" watchObservedRunningTime="2025-12-01 06:45:29.181096393 +0000 UTC m=+138.746109367" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.261484 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-xwqwc" podStartSLOduration=119.261468571 podStartE2EDuration="1m59.261468571s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:29.220618008 +0000 UTC m=+138.785630981" watchObservedRunningTime="2025-12-01 06:45:29.261468571 +0000 UTC m=+138.826481544" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.282648 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:29 crc kubenswrapper[4632]: E1201 06:45:29.282937 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:29.782926859 +0000 UTC m=+139.347939832 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.290679 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-j4hgc" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.295896 4632 patch_prober.go:28] interesting pod/router-default-5444994796-j4hgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 06:45:29 crc kubenswrapper[4632]: [-]has-synced failed: reason withheld Dec 01 06:45:29 crc kubenswrapper[4632]: [+]process-running ok Dec 01 06:45:29 crc kubenswrapper[4632]: healthz check failed Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.295932 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-j4hgc" podUID="8906a9ad-be53-4475-8b99-c2895ff794fa" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.304499 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-kxph4" podStartSLOduration=120.304481538 podStartE2EDuration="2m0.304481538s" podCreationTimestamp="2025-12-01 06:43:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:29.303586898 +0000 UTC m=+138.868599871" watchObservedRunningTime="2025-12-01 06:45:29.304481538 +0000 UTC m=+138.869494510" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.305088 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-j4hgc" podStartSLOduration=119.305067334 podStartE2EDuration="1m59.305067334s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:29.261920274 +0000 UTC m=+138.826933247" watchObservedRunningTime="2025-12-01 06:45:29.305067334 +0000 UTC m=+138.870080307" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.383380 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:45:29 crc kubenswrapper[4632]: E1201 06:45:29.383525 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:29.883505228 +0000 UTC m=+139.448518201 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.383866 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:29 crc kubenswrapper[4632]: E1201 06:45:29.384167 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:29.884154174 +0000 UTC m=+139.449167147 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.389622 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" podStartSLOduration=120.389590679 podStartE2EDuration="2m0.389590679s" podCreationTimestamp="2025-12-01 06:43:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:29.389301703 +0000 UTC m=+138.954314676" watchObservedRunningTime="2025-12-01 06:45:29.389590679 +0000 UTC m=+138.954603651" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.393336 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr" podStartSLOduration=119.39332544 podStartE2EDuration="1m59.39332544s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:29.341267035 +0000 UTC m=+138.906280018" watchObservedRunningTime="2025-12-01 06:45:29.39332544 +0000 UTC m=+138.958338414" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.475702 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-nqvnc" podStartSLOduration=120.475685651 podStartE2EDuration="2m0.475685651s" podCreationTimestamp="2025-12-01 06:43:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:29.423991704 +0000 UTC m=+138.989004677" watchObservedRunningTime="2025-12-01 06:45:29.475685651 +0000 UTC m=+139.040698625" Dec 01 06:45:29 crc 
kubenswrapper[4632]: I1201 06:45:29.476941 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-58hth" podStartSLOduration=119.476935351 podStartE2EDuration="1m59.476935351s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:29.474777877 +0000 UTC m=+139.039790850" watchObservedRunningTime="2025-12-01 06:45:29.476935351 +0000 UTC m=+139.041948325" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.485277 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:45:29 crc kubenswrapper[4632]: E1201 06:45:29.485624 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:29.98560968 +0000 UTC m=+139.550622653 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.587035 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:29 crc kubenswrapper[4632]: E1201 06:45:29.587383 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:30.087370804 +0000 UTC m=+139.652383776 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.687898 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:45:29 crc kubenswrapper[4632]: E1201 06:45:29.687910 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:30.18789455 +0000 UTC m=+139.752907524 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.688521 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:29 crc kubenswrapper[4632]: E1201 06:45:29.688799 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:30.188787527 +0000 UTC m=+139.753800500 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.776559 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-52x8r" event={"ID":"ba580937-8b3b-404d-a9fe-3d4e014ce6b1","Type":"ContainerStarted","Data":"2852c63177091ff230279b0962afb0b1f1897857744826de9d5634a183defc74"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.783507 4632 generic.go:334] "Generic (PLEG): container finished" podID="8a77399a-5697-4f2d-8ec5-8e05106a356d" containerID="af9ed602cd340845c76eca70764e3b3031e5f8374664e165e51e69574ac60808" exitCode=0 Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.783584 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gkwgl" event={"ID":"8a77399a-5697-4f2d-8ec5-8e05106a356d","Type":"ContainerStarted","Data":"ffbb23938ea16338a00af7e9a7d17a2756f1bce05a9f9a63cb78ac902f92354c"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.783602 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gkwgl" event={"ID":"8a77399a-5697-4f2d-8ec5-8e05106a356d","Type":"ContainerDied","Data":"af9ed602cd340845c76eca70764e3b3031e5f8374664e165e51e69574ac60808"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.783733 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gkwgl" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.784770 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-wchfw" event={"ID":"2181e6e0-5901-48a6-b44f-621e41a161d5","Type":"ContainerStarted","Data":"e4a0336a4db3fb904ad809b6976b4672e099d9316fcbf44d73efe30a3427d8c4"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.785101 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-wchfw" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.786534 4632 patch_prober.go:28] interesting pod/downloads-7954f5f757-wchfw container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.786581 4632 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-wchfw" podUID="2181e6e0-5901-48a6-b44f-621e41a161d5" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.787109 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-qkxnl" event={"ID":"4d206d4a-17c1-4750-9984-35ff08f92e2e","Type":"ContainerStarted","Data":"6e41b842cb765e68ecbd13ad0025a5daeb987b3576b510a3aa398cc33086b20c"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.787150 4632 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-ingress-canary/ingress-canary-qkxnl" event={"ID":"4d206d4a-17c1-4750-9984-35ff08f92e2e","Type":"ContainerStarted","Data":"683db65518179c3dc977cdcd2820a0d8ceb703c2094ea5c13a8897c9e0367be8"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.788668 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-br5vj" event={"ID":"c4a7f54b-f039-4564-a917-b643351932f5","Type":"ContainerStarted","Data":"e9f46f661e50e7708c7c1cb0f59e79b9bc068e3753c78d55562142e7921f71ce"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.788692 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-br5vj" event={"ID":"c4a7f54b-f039-4564-a917-b643351932f5","Type":"ContainerStarted","Data":"54a457aeadb0c823ae4c51a6b3df86939c9f3beadb6a632a93d2b29e8180d2d8"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.789554 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:45:29 crc kubenswrapper[4632]: E1201 06:45:29.789694 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:30.289658058 +0000 UTC m=+139.854671031 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.790276 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.790526 4632 generic.go:334] "Generic (PLEG): container finished" podID="7fa3078d-93ff-452f-a746-010c568af171" containerID="8c8d4e4585da892b1724b63610189383ed7477c04282bdbbfae7c1c5b651265a" exitCode=0 Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.790598 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn" event={"ID":"7fa3078d-93ff-452f-a746-010c568af171","Type":"ContainerDied","Data":"8c8d4e4585da892b1724b63610189383ed7477c04282bdbbfae7c1c5b651265a"} Dec 01 06:45:29 crc kubenswrapper[4632]: E1201 06:45:29.790633 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-01 06:45:30.290618382 +0000 UTC m=+139.855631355 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.791881 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-789rs" event={"ID":"5d40b4d5-eff4-488b-94c1-6b7a7a5dcdc3","Type":"ContainerStarted","Data":"c33a3bd964d76e36de080758f2f3707045732ae45323757c7548382124762086"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.795373 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-ndjg6" event={"ID":"dab5433a-456e-4006-a05d-a2f04ebe1330","Type":"ContainerStarted","Data":"5e3bdd0d2c7c8f76812b00d29b407885019148b76f8e15acd6bb408e86c3284d"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.795405 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-ndjg6" event={"ID":"dab5433a-456e-4006-a05d-a2f04ebe1330","Type":"ContainerStarted","Data":"a2e11e788d47ec7f7782492ef3291ec3a74e68c976069768e7905a221d4068b3"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.796716 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jrfmt" event={"ID":"7c7cadd7-3773-48bd-a595-631a38d4d693","Type":"ContainerStarted","Data":"404c3808e6bfc2ee16156f85ced9ef471ade232e7a1962478dd61f740a40cd1d"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.798286 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-87rgh" event={"ID":"f324aefe-58e1-4638-915b-8cc658d13c5d","Type":"ContainerStarted","Data":"f9c370ecd406e14588a1c2e8ee4eeb62aaa80949a52dec7a3724c8053b304b85"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.798307 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-87rgh" event={"ID":"f324aefe-58e1-4638-915b-8cc658d13c5d","Type":"ContainerStarted","Data":"8d683402d3614aaacffa537f50a2135ad9b434af249149fe9c894dbf0b118600"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.799200 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-d9ddv" event={"ID":"192cea7d-fce6-4d2f-8fc7-9b0ed2be15fe","Type":"ContainerStarted","Data":"b2a7b9e6a47ae84b576d441274e6bf2ad75eee67cd61c4340e0e9dd3dd65b65f"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.799232 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-d9ddv" event={"ID":"192cea7d-fce6-4d2f-8fc7-9b0ed2be15fe","Type":"ContainerStarted","Data":"3dff872176b71e792a1afd82c109e6ead997315298f695bc81ab390415127daf"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.799796 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-d9ddv" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.801482 4632 
patch_prober.go:28] interesting pod/console-operator-58897d9998-d9ddv container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/readyz\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body= Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.801481 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6rr8" event={"ID":"cb6f81ed-0172-4f32-97cb-b2911644523a","Type":"ContainerStarted","Data":"c3b2725e9b3e44ac79284d76dc43b538e187a4b6e028177c067c43dba084bce4"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.801521 4632 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-d9ddv" podUID="192cea7d-fce6-4d2f-8fc7-9b0ed2be15fe" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.11:8443/readyz\": dial tcp 10.217.0.11:8443: connect: connection refused" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.801533 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6rr8" event={"ID":"cb6f81ed-0172-4f32-97cb-b2911644523a","Type":"ContainerStarted","Data":"a9f5d1143e9a41a59fd457a0649154d441e3654fd82bf099a558f2cd7f8cb7dd"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.804143 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2spdj" event={"ID":"4bdc3484-8840-4d88-83c9-32ba1da693fa","Type":"ContainerStarted","Data":"28d9133c587f34f95b34dd0fb2c0a76e7f5497761fc4a84fa956bca037e466f3"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.804164 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2spdj" event={"ID":"4bdc3484-8840-4d88-83c9-32ba1da693fa","Type":"ContainerStarted","Data":"7f4516fdb932071ea4f59b34d834a806acd1e987c30f591b62fd4555e3953948"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.804173 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2spdj" event={"ID":"4bdc3484-8840-4d88-83c9-32ba1da693fa","Type":"ContainerStarted","Data":"23c8d1e822914ca74d1f7524babdd39075c95a9657a6ee3f11482d68504f1a9a"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.806632 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cn2l5" event={"ID":"39126103-ae29-4563-af2f-cd549bb98ebb","Type":"ContainerStarted","Data":"11b15d58e2f2fc4e71a1fcbc96875834bfd587c929991190cc0d7fd4f44b7d4d"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.809426 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7r8sp" event={"ID":"978db707-ffa7-491e-b032-59db1b5aa9c9","Type":"ContainerStarted","Data":"c98a2c0b46697ac69f15886727450e32bbe30f26b3664565a038bf0d738f0deb"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.809476 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7r8sp" event={"ID":"978db707-ffa7-491e-b032-59db1b5aa9c9","Type":"ContainerStarted","Data":"56fa4e2ffd4b9e85fceb60981907828870ef8354626b1acd695467da650a3e19"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.809702 4632 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7r8sp" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.811974 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-kmxd4" event={"ID":"7a1eaf03-bfc0-4185-ad22-55e6ef75dae5","Type":"ContainerStarted","Data":"1cd42964bce679ef648855a0961f3763caab82045f6d0ef99e7a96c8cbb0ecaa"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.811996 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-kmxd4" event={"ID":"7a1eaf03-bfc0-4185-ad22-55e6ef75dae5","Type":"ContainerStarted","Data":"3aa970b024cde653ae8d5bf5eeae0a989c3d9cc51defd5228bd21b99aa6f303f"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.814618 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mxcl4" event={"ID":"e354731f-7ed1-47e3-8d64-7d55f1613100","Type":"ContainerStarted","Data":"57a19ee000e125888d4a66508d80e253c404f820e95fb96a63b9930075f33ff5"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.814658 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mxcl4" event={"ID":"e354731f-7ed1-47e3-8d64-7d55f1613100","Type":"ContainerStarted","Data":"3fab705bfd68eb8c81ab51a7ab184d280add3ae6e4f8962d83f4f693b8aabb76"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.819139 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn" event={"ID":"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa","Type":"ContainerStarted","Data":"cc4cbca5efb83b78c3cfacff5ce1b3098e59fa0b087c93a4f39512bb7dc3c10c"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.819185 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn" event={"ID":"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa","Type":"ContainerStarted","Data":"c422236c2d457b76b27b0a898160ff00c57b76ccc1d9e76f5f3ed5e07bebfae1"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.819424 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.821626 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-m2fgg" event={"ID":"ae14b3fb-8e47-4b2c-af2a-689240f1dae9","Type":"ContainerStarted","Data":"6d1589b420a5f7e2d8b6480d12d0b21aa7c317b68000f2dabf1bc046a1b4a2e2"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.821659 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-m2fgg" event={"ID":"ae14b3fb-8e47-4b2c-af2a-689240f1dae9","Type":"ContainerStarted","Data":"a0703d0925cbb054dcb9ae468afc8c35d80a60d79185f973bdbffa16565c4d43"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.821669 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-m2fgg" event={"ID":"ae14b3fb-8e47-4b2c-af2a-689240f1dae9","Type":"ContainerStarted","Data":"40fab19c744010b87b9ea03fc5a68705aec625ad6df29c32eba64a69730abfe2"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.826816 4632 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-wmm47" event={"ID":"1114abcc-5526-4ef9-8e98-8fe888a77072","Type":"ContainerStarted","Data":"1adf9a1df9692ed3e3ca4e18339994794736221c484463d217da2df9790912c7"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.829045 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-52x8r" podStartSLOduration=119.829035539 podStartE2EDuration="1m59.829035539s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:29.795472418 +0000 UTC m=+139.360485390" watchObservedRunningTime="2025-12-01 06:45:29.829035539 +0000 UTC m=+139.394048513" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.833091 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7r8sp" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.839179 4632 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-d8bfn container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" start-of-body= Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.839217 4632 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn" podUID="d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.22:8443/healthz\": dial tcp 10.217.0.22:8443: connect: connection refused" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.841584 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-jwp2r" event={"ID":"514794d8-40b4-4938-9d15-aa39764ac45c","Type":"ContainerStarted","Data":"f9826f3d0a1f4f353a98656d1390d3f99875cf9a46803390606acd61296b7585"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.841614 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-jwp2r" event={"ID":"514794d8-40b4-4938-9d15-aa39764ac45c","Type":"ContainerStarted","Data":"1bfd7d33b6c9b00efd8bcd99f09397c1e5c0099b43390e98e9cc26e43e03d7c0"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.841626 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-jwp2r" event={"ID":"514794d8-40b4-4938-9d15-aa39764ac45c","Type":"ContainerStarted","Data":"012c7ee3968c4421d2df028f8efa3d941b3fee5e8293d68c029fce29ed8673ad"} Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.854677 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.855255 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-qkxnl" podStartSLOduration=6.855242894 podStartE2EDuration="6.855242894s" podCreationTimestamp="2025-12-01 06:45:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:29.854908613 +0000 UTC m=+139.419921586" watchObservedRunningTime="2025-12-01 06:45:29.855242894 +0000 UTC m=+139.420255868" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.859803 4632 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-6vgtg" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.877864 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gkwgl" podStartSLOduration=120.877841916 podStartE2EDuration="2m0.877841916s" podCreationTimestamp="2025-12-01 06:43:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:29.875421425 +0000 UTC m=+139.440434398" watchObservedRunningTime="2025-12-01 06:45:29.877841916 +0000 UTC m=+139.442854889" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.891179 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:45:29 crc kubenswrapper[4632]: E1201 06:45:29.896473 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:30.396451504 +0000 UTC m=+139.961464477 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.896931 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:29 crc kubenswrapper[4632]: E1201 06:45:29.897341 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:30.397330433 +0000 UTC m=+139.962343406 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.915130 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-br5vj" podStartSLOduration=119.915115895 podStartE2EDuration="1m59.915115895s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:29.914649184 +0000 UTC m=+139.479662157" watchObservedRunningTime="2025-12-01 06:45:29.915115895 +0000 UTC m=+139.480128868" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.947782 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-789rs" podStartSLOduration=119.947769832 podStartE2EDuration="1m59.947769832s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:29.946616754 +0000 UTC m=+139.511629728" watchObservedRunningTime="2025-12-01 06:45:29.947769832 +0000 UTC m=+139.512782805" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.980030 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-wchfw" podStartSLOduration=119.980012222 podStartE2EDuration="1m59.980012222s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:29.965503266 +0000 UTC m=+139.530516238" watchObservedRunningTime="2025-12-01 06:45:29.980012222 +0000 UTC m=+139.545025194" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.980556 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-ndjg6" podStartSLOduration=29.980549406 podStartE2EDuration="29.980549406s" podCreationTimestamp="2025-12-01 06:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:29.978492813 +0000 UTC m=+139.543505805" watchObservedRunningTime="2025-12-01 06:45:29.980549406 +0000 UTC m=+139.545562379" Dec 01 06:45:29 crc kubenswrapper[4632]: I1201 06:45:29.998847 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:45:29 crc kubenswrapper[4632]: E1201 06:45:29.999283 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-01 06:45:30.499271005 +0000 UTC m=+140.064283978 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.015681 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-d9ddv" podStartSLOduration=121.015667946 podStartE2EDuration="2m1.015667946s" podCreationTimestamp="2025-12-01 06:43:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:30.014645094 +0000 UTC m=+139.579658067" watchObservedRunningTime="2025-12-01 06:45:30.015667946 +0000 UTC m=+139.580680919" Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.060865 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-2spdj" podStartSLOduration=120.060847814 podStartE2EDuration="2m0.060847814s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:30.047011117 +0000 UTC m=+139.612024091" watchObservedRunningTime="2025-12-01 06:45:30.060847814 +0000 UTC m=+139.625860786" Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.083566 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-j6rr8" podStartSLOduration=120.08354512 podStartE2EDuration="2m0.08354512s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:30.077021353 +0000 UTC m=+139.642034325" watchObservedRunningTime="2025-12-01 06:45:30.08354512 +0000 UTC m=+139.648558093" Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.100914 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:30 crc kubenswrapper[4632]: E1201 06:45:30.101216 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:30.601205356 +0000 UTC m=+140.166218329 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.146264 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-m2fgg" podStartSLOduration=121.146247413 podStartE2EDuration="2m1.146247413s" podCreationTimestamp="2025-12-01 06:43:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:30.14595441 +0000 UTC m=+139.710967383" watchObservedRunningTime="2025-12-01 06:45:30.146247413 +0000 UTC m=+139.711260386" Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.198968 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-kmxd4" podStartSLOduration=120.198926551 podStartE2EDuration="2m0.198926551s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:30.17580874 +0000 UTC m=+139.740821713" watchObservedRunningTime="2025-12-01 06:45:30.198926551 +0000 UTC m=+139.763939524" Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.200076 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-7r8sp" podStartSLOduration=120.200065452 podStartE2EDuration="2m0.200065452s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:30.196962562 +0000 UTC m=+139.761975545" watchObservedRunningTime="2025-12-01 06:45:30.200065452 +0000 UTC m=+139.765078425" Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.205004 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:45:30 crc kubenswrapper[4632]: E1201 06:45:30.205408 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:30.705393472 +0000 UTC m=+140.270406445 (durationBeforeRetry 500ms). 
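The pod_startup_latency_tracker entries above are straightforward subtraction: podStartSLOduration is the observed running time minus podCreationTimestamp, and because these pods pulled no images (firstStartedPulling and lastFinishedPulling are the zero time), podStartE2EDuration comes out equal to it. A worked example using the catalog-operator entry's own timestamps:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Values copied from the catalog-operator-68c6474976-7r8sp entry above;
	// .999999999 makes the fractional seconds optional in the layout.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, err := time.Parse(layout, "2025-12-01 06:43:30 +0000 UTC")
	if err != nil {
		panic(err)
	}
	observed, err := time.Parse(layout, "2025-12-01 06:45:30.200065452 +0000 UTC")
	if err != nil {
		panic(err)
	}

	// With no image pulls, the SLO duration is just observed - created.
	slo := observed.Sub(created)
	fmt.Println(slo)           // 2m0.200065452s, matching podStartE2EDuration
	fmt.Println(slo.Seconds()) // 120.200065452, matching podStartSLOduration
}
```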
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.261883 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cn2l5" podStartSLOduration=120.261867024 podStartE2EDuration="2m0.261867024s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:30.228567368 +0000 UTC m=+139.793580342" watchObservedRunningTime="2025-12-01 06:45:30.261867024 +0000 UTC m=+139.826879998" Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.285412 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-mxcl4" podStartSLOduration=120.285396442 podStartE2EDuration="2m0.285396442s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:30.283759932 +0000 UTC m=+139.848772895" watchObservedRunningTime="2025-12-01 06:45:30.285396442 +0000 UTC m=+139.850409414" Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.291513 4632 patch_prober.go:28] interesting pod/router-default-5444994796-j4hgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 06:45:30 crc kubenswrapper[4632]: [-]has-synced failed: reason withheld Dec 01 06:45:30 crc kubenswrapper[4632]: [+]process-running ok Dec 01 06:45:30 crc kubenswrapper[4632]: healthz check failed Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.291559 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-j4hgc" podUID="8906a9ad-be53-4475-8b99-c2895ff794fa" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.303684 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-jrfmt" podStartSLOduration=120.303669074 podStartE2EDuration="2m0.303669074s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:30.302995492 +0000 UTC m=+139.868008465" watchObservedRunningTime="2025-12-01 06:45:30.303669074 +0000 UTC m=+139.868682048" Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.306883 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:30 crc kubenswrapper[4632]: E1201 06:45:30.307174 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:30.807158783 +0000 UTC m=+140.372171756 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.317934 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-jwp2r" Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.327105 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn" podStartSLOduration=120.327093254 podStartE2EDuration="2m0.327093254s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:30.324222994 +0000 UTC m=+139.889235966" watchObservedRunningTime="2025-12-01 06:45:30.327093254 +0000 UTC m=+139.892106226" Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.408038 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:45:30 crc kubenswrapper[4632]: E1201 06:45:30.408156 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:30.908137109 +0000 UTC m=+140.473150082 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.408380 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:30 crc kubenswrapper[4632]: E1201 06:45:30.408644 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:30.908635279 +0000 UTC m=+140.473648252 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.514300 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:45:30 crc kubenswrapper[4632]: E1201 06:45:30.514733 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:31.014719736 +0000 UTC m=+140.579732709 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.615897 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:30 crc kubenswrapper[4632]: E1201 06:45:30.616277 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:31.116245033 +0000 UTC m=+140.681258007 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.717605 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:45:30 crc kubenswrapper[4632]: E1201 06:45:30.717767 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:31.217745264 +0000 UTC m=+140.782758237 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.717863 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:30 crc kubenswrapper[4632]: E1201 06:45:30.718139 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:31.218127295 +0000 UTC m=+140.783140269 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.798190 4632 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.818529 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:45:30 crc kubenswrapper[4632]: E1201 06:45:30.818643 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-01 06:45:31.318621838 +0000 UTC m=+140.883634811 (durationBeforeRetry 500ms). 
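The turning point in the mount saga arrives just above: once the kubevirt.io.hostpath-provisioner-reg.sock registration socket appears under /var/lib/kubelet/plugins_registry, the plugin watcher adds it to the desired state cache, kubelet validates and registers the CSI driver, and the next retry of the pending volume operations can succeed. A simplified model of the socket discovery step (a polling stand-in for kubelet's filesystem watcher; names hypothetical):

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"time"
)

// scanSockets returns the registration sockets currently present in dir,
// the way new plugin sockets under /var/lib/kubelet/plugins_registry
// trigger the "Adding socket path ... to desired state cache" entry above.
func scanSockets(dir string) ([]string, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return nil, err
	}
	var socks []string
	for _, e := range entries {
		if filepath.Ext(e.Name()) == ".sock" {
			socks = append(socks, filepath.Join(dir, e.Name()))
		}
	}
	return socks, nil
}

func main() {
	dir := "/var/lib/kubelet/plugins_registry" // path from the log
	seen := map[string]bool{}
	for i := 0; i < 3; i++ { // bounded loop for the sketch; kubelet watches continuously
		socks, err := scanSockets(dir)
		if err != nil {
			fmt.Println(err)
			return
		}
		for _, s := range socks {
			if !seen[s] {
				seen[s] = true
				fmt.Println("Adding socket path or updating timestamp to desired state cache:", s)
				// A real registration would now dial the socket and run the
				// plugin registration handshake (GetInfo / NotifyRegistrationStatus).
			}
		}
		time.Sleep(time.Second)
	}
}
```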
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.818807 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:30 crc kubenswrapper[4632]: E1201 06:45:30.819121 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-01 06:45:31.319109849 +0000 UTC m=+140.884122823 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-w2js2" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.852200 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn" event={"ID":"7fa3078d-93ff-452f-a746-010c568af171","Type":"ContainerStarted","Data":"dfdb343fca8e12036d8bfbfe592ba3f190da366012a0a03970c9664d28fa7b42"} Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.854701 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-87rgh" event={"ID":"f324aefe-58e1-4638-915b-8cc658d13c5d","Type":"ContainerStarted","Data":"61d015ff1b2657b13308b4d122b5c30f81e5c1b00ab34429edf10409501899a6"} Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.854743 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-87rgh" event={"ID":"f324aefe-58e1-4638-915b-8cc658d13c5d","Type":"ContainerStarted","Data":"3fe8a2091e7dd74fae536d20da1f46eab672da995a6f148c285b3541b4f818b9"} Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.855339 4632 patch_prober.go:28] interesting pod/downloads-7954f5f757-wchfw container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.855694 4632 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-wchfw" podUID="2181e6e0-5901-48a6-b44f-621e41a161d5" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.859975 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-console-operator/console-operator-58897d9998-d9ddv" Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.868607 4632 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-01T06:45:30.798209616Z","Handler":null,"Name":""} Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.869719 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn" Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.871654 4632 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.871766 4632 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.878420 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-jwp2r" podStartSLOduration=6.878408977 podStartE2EDuration="6.878408977s" podCreationTimestamp="2025-12-01 06:45:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:30.391203124 +0000 UTC m=+139.956216098" watchObservedRunningTime="2025-12-01 06:45:30.878408977 +0000 UTC m=+140.443421950" Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.879499 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn" podStartSLOduration=120.879493054 podStartE2EDuration="2m0.879493054s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:30.877956633 +0000 UTC m=+140.442969606" watchObservedRunningTime="2025-12-01 06:45:30.879493054 +0000 UTC m=+140.444506027" Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.919618 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.920143 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-87rgh" podStartSLOduration=6.920132538 podStartE2EDuration="6.920132538s" podCreationTimestamp="2025-12-01 06:45:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:30.900237664 +0000 UTC m=+140.465250637" watchObservedRunningTime="2025-12-01 06:45:30.920132538 +0000 UTC m=+140.485145511" Dec 01 06:45:30 crc kubenswrapper[4632]: I1201 06:45:30.935820 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: 
"8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.022196 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.025977 4632 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.026020 4632 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.061382 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-h6xj5"] Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.062313 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h6xj5" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.066544 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.068321 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.068608 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-kxph4" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.075520 4632 patch_prober.go:28] interesting pod/apiserver-76f77b778f-kxph4 container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 01 06:45:31 crc kubenswrapper[4632]: [+]log ok Dec 01 06:45:31 crc kubenswrapper[4632]: [+]etcd ok Dec 01 06:45:31 crc kubenswrapper[4632]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 01 06:45:31 crc kubenswrapper[4632]: [+]poststarthook/generic-apiserver-start-informers ok Dec 01 06:45:31 crc kubenswrapper[4632]: [+]poststarthook/max-in-flight-filter ok Dec 01 06:45:31 crc kubenswrapper[4632]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 01 06:45:31 crc kubenswrapper[4632]: [+]poststarthook/image.openshift.io-apiserver-caches ok Dec 01 06:45:31 crc kubenswrapper[4632]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Dec 01 06:45:31 crc kubenswrapper[4632]: [+]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa ok Dec 01 06:45:31 crc kubenswrapper[4632]: 
[+]poststarthook/project.openshift.io-projectcache ok Dec 01 06:45:31 crc kubenswrapper[4632]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Dec 01 06:45:31 crc kubenswrapper[4632]: [+]poststarthook/openshift.io-startinformers ok Dec 01 06:45:31 crc kubenswrapper[4632]: [+]poststarthook/openshift.io-restmapperupdater ok Dec 01 06:45:31 crc kubenswrapper[4632]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 01 06:45:31 crc kubenswrapper[4632]: livez check failed Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.075557 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-kxph4" podUID="336ac52f-ee34-4bab-a0c6-d8c1d40d6e2f" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.112024 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-w2js2\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.122877 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b173a24-6c77-40ce-8d2e-daa317c2972c-catalog-content\") pod \"certified-operators-h6xj5\" (UID: \"9b173a24-6c77-40ce-8d2e-daa317c2972c\") " pod="openshift-marketplace/certified-operators-h6xj5" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.123110 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b173a24-6c77-40ce-8d2e-daa317c2972c-utilities\") pod \"certified-operators-h6xj5\" (UID: \"9b173a24-6c77-40ce-8d2e-daa317c2972c\") " pod="openshift-marketplace/certified-operators-h6xj5" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.123246 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ppkv\" (UniqueName: \"kubernetes.io/projected/9b173a24-6c77-40ce-8d2e-daa317c2972c-kube-api-access-5ppkv\") pod \"certified-operators-h6xj5\" (UID: \"9b173a24-6c77-40ce-8d2e-daa317c2972c\") " pod="openshift-marketplace/certified-operators-h6xj5" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.138513 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h6xj5"] Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.226507 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b173a24-6c77-40ce-8d2e-daa317c2972c-utilities\") pod \"certified-operators-h6xj5\" (UID: \"9b173a24-6c77-40ce-8d2e-daa317c2972c\") " pod="openshift-marketplace/certified-operators-h6xj5" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.226822 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ppkv\" (UniqueName: \"kubernetes.io/projected/9b173a24-6c77-40ce-8d2e-daa317c2972c-kube-api-access-5ppkv\") pod \"certified-operators-h6xj5\" (UID: \"9b173a24-6c77-40ce-8d2e-daa317c2972c\") " pod="openshift-marketplace/certified-operators-h6xj5" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.226941 4632 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b173a24-6c77-40ce-8d2e-daa317c2972c-utilities\") pod \"certified-operators-h6xj5\" (UID: \"9b173a24-6c77-40ce-8d2e-daa317c2972c\") " pod="openshift-marketplace/certified-operators-h6xj5" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.227036 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b173a24-6c77-40ce-8d2e-daa317c2972c-catalog-content\") pod \"certified-operators-h6xj5\" (UID: \"9b173a24-6c77-40ce-8d2e-daa317c2972c\") " pod="openshift-marketplace/certified-operators-h6xj5" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.227276 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b173a24-6c77-40ce-8d2e-daa317c2972c-catalog-content\") pod \"certified-operators-h6xj5\" (UID: \"9b173a24-6c77-40ce-8d2e-daa317c2972c\") " pod="openshift-marketplace/certified-operators-h6xj5" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.245584 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ppkv\" (UniqueName: \"kubernetes.io/projected/9b173a24-6c77-40ce-8d2e-daa317c2972c-kube-api-access-5ppkv\") pod \"certified-operators-h6xj5\" (UID: \"9b173a24-6c77-40ce-8d2e-daa317c2972c\") " pod="openshift-marketplace/certified-operators-h6xj5" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.254311 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5w5vx"] Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.255124 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5w5vx" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.257328 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.260627 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5w5vx"] Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.291626 4632 patch_prober.go:28] interesting pod/router-default-5444994796-j4hgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 06:45:31 crc kubenswrapper[4632]: [-]has-synced failed: reason withheld Dec 01 06:45:31 crc kubenswrapper[4632]: [+]process-running ok Dec 01 06:45:31 crc kubenswrapper[4632]: healthz check failed Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.291678 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-j4hgc" podUID="8906a9ad-be53-4475-8b99-c2895ff794fa" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.328553 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84310075-a3e6-46ff-906c-372ee393d197-catalog-content\") pod \"community-operators-5w5vx\" (UID: \"84310075-a3e6-46ff-906c-372ee393d197\") " pod="openshift-marketplace/community-operators-5w5vx" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.328669 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pm7n7\" (UniqueName: \"kubernetes.io/projected/84310075-a3e6-46ff-906c-372ee393d197-kube-api-access-pm7n7\") pod \"community-operators-5w5vx\" (UID: \"84310075-a3e6-46ff-906c-372ee393d197\") " pod="openshift-marketplace/community-operators-5w5vx" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.328789 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84310075-a3e6-46ff-906c-372ee393d197-utilities\") pod \"community-operators-5w5vx\" (UID: \"84310075-a3e6-46ff-906c-372ee393d197\") " pod="openshift-marketplace/community-operators-5w5vx" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.374345 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h6xj5" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.378731 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.430385 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84310075-a3e6-46ff-906c-372ee393d197-utilities\") pod \"community-operators-5w5vx\" (UID: \"84310075-a3e6-46ff-906c-372ee393d197\") " pod="openshift-marketplace/community-operators-5w5vx" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.430500 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84310075-a3e6-46ff-906c-372ee393d197-catalog-content\") pod \"community-operators-5w5vx\" (UID: \"84310075-a3e6-46ff-906c-372ee393d197\") " pod="openshift-marketplace/community-operators-5w5vx" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.430547 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pm7n7\" (UniqueName: \"kubernetes.io/projected/84310075-a3e6-46ff-906c-372ee393d197-kube-api-access-pm7n7\") pod \"community-operators-5w5vx\" (UID: \"84310075-a3e6-46ff-906c-372ee393d197\") " pod="openshift-marketplace/community-operators-5w5vx" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.430986 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84310075-a3e6-46ff-906c-372ee393d197-catalog-content\") pod \"community-operators-5w5vx\" (UID: \"84310075-a3e6-46ff-906c-372ee393d197\") " pod="openshift-marketplace/community-operators-5w5vx" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.431214 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84310075-a3e6-46ff-906c-372ee393d197-utilities\") pod \"community-operators-5w5vx\" (UID: \"84310075-a3e6-46ff-906c-372ee393d197\") " pod="openshift-marketplace/community-operators-5w5vx" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.450471 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pm7n7\" (UniqueName: \"kubernetes.io/projected/84310075-a3e6-46ff-906c-372ee393d197-kube-api-access-pm7n7\") pod \"community-operators-5w5vx\" (UID: \"84310075-a3e6-46ff-906c-372ee393d197\") " pod="openshift-marketplace/community-operators-5w5vx" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.460280 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-dgtqs"] Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.461058 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dgtqs" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.474044 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dgtqs"] Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.533263 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmqpk\" (UniqueName: \"kubernetes.io/projected/39776e22-d730-44e7-b6a5-f7707cffb5af-kube-api-access-dmqpk\") pod \"certified-operators-dgtqs\" (UID: \"39776e22-d730-44e7-b6a5-f7707cffb5af\") " pod="openshift-marketplace/certified-operators-dgtqs" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.533618 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39776e22-d730-44e7-b6a5-f7707cffb5af-utilities\") pod \"certified-operators-dgtqs\" (UID: \"39776e22-d730-44e7-b6a5-f7707cffb5af\") " pod="openshift-marketplace/certified-operators-dgtqs" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.533715 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39776e22-d730-44e7-b6a5-f7707cffb5af-catalog-content\") pod \"certified-operators-dgtqs\" (UID: \"39776e22-d730-44e7-b6a5-f7707cffb5af\") " pod="openshift-marketplace/certified-operators-dgtqs" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.569627 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5w5vx" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.637957 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39776e22-d730-44e7-b6a5-f7707cffb5af-utilities\") pod \"certified-operators-dgtqs\" (UID: \"39776e22-d730-44e7-b6a5-f7707cffb5af\") " pod="openshift-marketplace/certified-operators-dgtqs" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.638014 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39776e22-d730-44e7-b6a5-f7707cffb5af-catalog-content\") pod \"certified-operators-dgtqs\" (UID: \"39776e22-d730-44e7-b6a5-f7707cffb5af\") " pod="openshift-marketplace/certified-operators-dgtqs" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.638078 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmqpk\" (UniqueName: \"kubernetes.io/projected/39776e22-d730-44e7-b6a5-f7707cffb5af-kube-api-access-dmqpk\") pod \"certified-operators-dgtqs\" (UID: \"39776e22-d730-44e7-b6a5-f7707cffb5af\") " pod="openshift-marketplace/certified-operators-dgtqs" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.638823 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39776e22-d730-44e7-b6a5-f7707cffb5af-utilities\") pod \"certified-operators-dgtqs\" (UID: \"39776e22-d730-44e7-b6a5-f7707cffb5af\") " pod="openshift-marketplace/certified-operators-dgtqs" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.639036 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39776e22-d730-44e7-b6a5-f7707cffb5af-catalog-content\") pod \"certified-operators-dgtqs\" (UID: 
\"39776e22-d730-44e7-b6a5-f7707cffb5af\") " pod="openshift-marketplace/certified-operators-dgtqs" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.688608 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2cmb4"] Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.713610 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2cmb4" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.718743 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2cmb4"] Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.725192 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmqpk\" (UniqueName: \"kubernetes.io/projected/39776e22-d730-44e7-b6a5-f7707cffb5af-kube-api-access-dmqpk\") pod \"certified-operators-dgtqs\" (UID: \"39776e22-d730-44e7-b6a5-f7707cffb5af\") " pod="openshift-marketplace/certified-operators-dgtqs" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.774305 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dgtqs" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.777397 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h6xj5"] Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.815182 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-w2js2"] Dec 01 06:45:31 crc kubenswrapper[4632]: W1201 06:45:31.828607 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7cd355d1_fe1f_4c1d_81fa_280d03a6c78a.slice/crio-b008e23df9addff912e0ee8cdaf50a618c09be9222a9aa0e62ed2bae6ef9f683 WatchSource:0}: Error finding container b008e23df9addff912e0ee8cdaf50a618c09be9222a9aa0e62ed2bae6ef9f683: Status 404 returned error can't find the container with id b008e23df9addff912e0ee8cdaf50a618c09be9222a9aa0e62ed2bae6ef9f683 Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.837878 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.838134 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.839978 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cba9c81-c125-4984-9e0f-0783b49b8bf6-catalog-content\") pod \"community-operators-2cmb4\" (UID: \"5cba9c81-c125-4984-9e0f-0783b49b8bf6\") " pod="openshift-marketplace/community-operators-2cmb4" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.840014 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cba9c81-c125-4984-9e0f-0783b49b8bf6-utilities\") pod \"community-operators-2cmb4\" (UID: \"5cba9c81-c125-4984-9e0f-0783b49b8bf6\") " pod="openshift-marketplace/community-operators-2cmb4" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.840054 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fksgn\" (UniqueName: 
\"kubernetes.io/projected/5cba9c81-c125-4984-9e0f-0783b49b8bf6-kube-api-access-fksgn\") pod \"community-operators-2cmb4\" (UID: \"5cba9c81-c125-4984-9e0f-0783b49b8bf6\") " pod="openshift-marketplace/community-operators-2cmb4" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.850875 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.879551 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-87rgh" event={"ID":"f324aefe-58e1-4638-915b-8cc658d13c5d","Type":"ContainerStarted","Data":"967a31c304673f2b1156c8f4c0360750b7907cf7427db54470abdd65b8588560"} Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.880369 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5w5vx"] Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.886323 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" event={"ID":"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a","Type":"ContainerStarted","Data":"b008e23df9addff912e0ee8cdaf50a618c09be9222a9aa0e62ed2bae6ef9f683"} Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.889661 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h6xj5" event={"ID":"9b173a24-6c77-40ce-8d2e-daa317c2972c","Type":"ContainerStarted","Data":"77d6a159ceacc7eabbd4c9ed0b19e14151d8f248ad06a1d15bbfe40505fbf28a"} Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.892063 4632 generic.go:334] "Generic (PLEG): container finished" podID="dab5433a-456e-4006-a05d-a2f04ebe1330" containerID="5e3bdd0d2c7c8f76812b00d29b407885019148b76f8e15acd6bb408e86c3284d" exitCode=0 Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.892517 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-ndjg6" event={"ID":"dab5433a-456e-4006-a05d-a2f04ebe1330","Type":"ContainerDied","Data":"5e3bdd0d2c7c8f76812b00d29b407885019148b76f8e15acd6bb408e86c3284d"} Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.893990 4632 patch_prober.go:28] interesting pod/downloads-7954f5f757-wchfw container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.894057 4632 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-wchfw" podUID="2181e6e0-5901-48a6-b44f-621e41a161d5" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.899715 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-lp6wn" Dec 01 06:45:31 crc kubenswrapper[4632]: W1201 06:45:31.901297 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod84310075_a3e6_46ff_906c_372ee393d197.slice/crio-383485e3cea0f76db2f46cc22b1062fcfa9dde924dd7db1c92ab3f1a3767dc0c WatchSource:0}: Error finding container 383485e3cea0f76db2f46cc22b1062fcfa9dde924dd7db1c92ab3f1a3767dc0c: Status 404 returned error can't find the container with id 
383485e3cea0f76db2f46cc22b1062fcfa9dde924dd7db1c92ab3f1a3767dc0c Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.940658 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cba9c81-c125-4984-9e0f-0783b49b8bf6-catalog-content\") pod \"community-operators-2cmb4\" (UID: \"5cba9c81-c125-4984-9e0f-0783b49b8bf6\") " pod="openshift-marketplace/community-operators-2cmb4" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.940690 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cba9c81-c125-4984-9e0f-0783b49b8bf6-utilities\") pod \"community-operators-2cmb4\" (UID: \"5cba9c81-c125-4984-9e0f-0783b49b8bf6\") " pod="openshift-marketplace/community-operators-2cmb4" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.940818 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fksgn\" (UniqueName: \"kubernetes.io/projected/5cba9c81-c125-4984-9e0f-0783b49b8bf6-kube-api-access-fksgn\") pod \"community-operators-2cmb4\" (UID: \"5cba9c81-c125-4984-9e0f-0783b49b8bf6\") " pod="openshift-marketplace/community-operators-2cmb4" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.941882 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cba9c81-c125-4984-9e0f-0783b49b8bf6-catalog-content\") pod \"community-operators-2cmb4\" (UID: \"5cba9c81-c125-4984-9e0f-0783b49b8bf6\") " pod="openshift-marketplace/community-operators-2cmb4" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.942120 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cba9c81-c125-4984-9e0f-0783b49b8bf6-utilities\") pod \"community-operators-2cmb4\" (UID: \"5cba9c81-c125-4984-9e0f-0783b49b8bf6\") " pod="openshift-marketplace/community-operators-2cmb4" Dec 01 06:45:31 crc kubenswrapper[4632]: I1201 06:45:31.962416 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fksgn\" (UniqueName: \"kubernetes.io/projected/5cba9c81-c125-4984-9e0f-0783b49b8bf6-kube-api-access-fksgn\") pod \"community-operators-2cmb4\" (UID: \"5cba9c81-c125-4984-9e0f-0783b49b8bf6\") " pod="openshift-marketplace/community-operators-2cmb4" Dec 01 06:45:32 crc kubenswrapper[4632]: I1201 06:45:32.004231 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dgtqs"] Dec 01 06:45:32 crc kubenswrapper[4632]: W1201 06:45:32.011137 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod39776e22_d730_44e7_b6a5_f7707cffb5af.slice/crio-e547e3485d53b743dbc0979eda47ae44461b4083ccd1b7ef161103d6b8df98d6 WatchSource:0}: Error finding container e547e3485d53b743dbc0979eda47ae44461b4083ccd1b7ef161103d6b8df98d6: Status 404 returned error can't find the container with id e547e3485d53b743dbc0979eda47ae44461b4083ccd1b7ef161103d6b8df98d6 Dec 01 06:45:32 crc kubenswrapper[4632]: I1201 06:45:32.053636 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2cmb4" Dec 01 06:45:32 crc kubenswrapper[4632]: I1201 06:45:32.204901 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2cmb4"] Dec 01 06:45:32 crc kubenswrapper[4632]: W1201 06:45:32.205913 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5cba9c81_c125_4984_9e0f_0783b49b8bf6.slice/crio-5fbbd3f3e743e19248215db5bfb72b5c83931d67dfa7e420f0a99a984551ff54 WatchSource:0}: Error finding container 5fbbd3f3e743e19248215db5bfb72b5c83931d67dfa7e420f0a99a984551ff54: Status 404 returned error can't find the container with id 5fbbd3f3e743e19248215db5bfb72b5c83931d67dfa7e420f0a99a984551ff54 Dec 01 06:45:32 crc kubenswrapper[4632]: I1201 06:45:32.291775 4632 patch_prober.go:28] interesting pod/router-default-5444994796-j4hgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 06:45:32 crc kubenswrapper[4632]: [-]has-synced failed: reason withheld Dec 01 06:45:32 crc kubenswrapper[4632]: [+]process-running ok Dec 01 06:45:32 crc kubenswrapper[4632]: healthz check failed Dec 01 06:45:32 crc kubenswrapper[4632]: I1201 06:45:32.291829 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-j4hgc" podUID="8906a9ad-be53-4475-8b99-c2895ff794fa" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 06:45:32 crc kubenswrapper[4632]: I1201 06:45:32.755063 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 01 06:45:32 crc kubenswrapper[4632]: I1201 06:45:32.845305 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-gkwgl" Dec 01 06:45:32 crc kubenswrapper[4632]: I1201 06:45:32.903836 4632 generic.go:334] "Generic (PLEG): container finished" podID="5cba9c81-c125-4984-9e0f-0783b49b8bf6" containerID="935f93c5446f80243c5fdc4af4566b3d48b1b5f911da5bd99122719dcfa68573" exitCode=0 Dec 01 06:45:32 crc kubenswrapper[4632]: I1201 06:45:32.903875 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2cmb4" event={"ID":"5cba9c81-c125-4984-9e0f-0783b49b8bf6","Type":"ContainerDied","Data":"935f93c5446f80243c5fdc4af4566b3d48b1b5f911da5bd99122719dcfa68573"} Dec 01 06:45:32 crc kubenswrapper[4632]: I1201 06:45:32.903914 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2cmb4" event={"ID":"5cba9c81-c125-4984-9e0f-0783b49b8bf6","Type":"ContainerStarted","Data":"5fbbd3f3e743e19248215db5bfb72b5c83931d67dfa7e420f0a99a984551ff54"} Dec 01 06:45:32 crc kubenswrapper[4632]: I1201 06:45:32.905653 4632 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 06:45:32 crc kubenswrapper[4632]: I1201 06:45:32.905851 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" event={"ID":"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a","Type":"ContainerStarted","Data":"0e9afa32116beede97444bfc1ea8094edb35526f1ba8942355838ddaccce7782"} Dec 01 06:45:32 crc kubenswrapper[4632]: I1201 06:45:32.905988 4632 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:45:32 crc kubenswrapper[4632]: I1201 06:45:32.906992 4632 generic.go:334] "Generic (PLEG): container finished" podID="84310075-a3e6-46ff-906c-372ee393d197" containerID="5ad3d806f1a1a93dd6667688ef9d436454f61d1e3a78f48a7c4ba444398931c6" exitCode=0 Dec 01 06:45:32 crc kubenswrapper[4632]: I1201 06:45:32.907045 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5w5vx" event={"ID":"84310075-a3e6-46ff-906c-372ee393d197","Type":"ContainerDied","Data":"5ad3d806f1a1a93dd6667688ef9d436454f61d1e3a78f48a7c4ba444398931c6"} Dec 01 06:45:32 crc kubenswrapper[4632]: I1201 06:45:32.907104 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5w5vx" event={"ID":"84310075-a3e6-46ff-906c-372ee393d197","Type":"ContainerStarted","Data":"383485e3cea0f76db2f46cc22b1062fcfa9dde924dd7db1c92ab3f1a3767dc0c"} Dec 01 06:45:32 crc kubenswrapper[4632]: I1201 06:45:32.908439 4632 generic.go:334] "Generic (PLEG): container finished" podID="9b173a24-6c77-40ce-8d2e-daa317c2972c" containerID="5c28dbfbffe7e95cd75374328c3bd2c5f1c4f2eb5b24e04f6e38452026233e45" exitCode=0 Dec 01 06:45:32 crc kubenswrapper[4632]: I1201 06:45:32.908483 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h6xj5" event={"ID":"9b173a24-6c77-40ce-8d2e-daa317c2972c","Type":"ContainerDied","Data":"5c28dbfbffe7e95cd75374328c3bd2c5f1c4f2eb5b24e04f6e38452026233e45"} Dec 01 06:45:32 crc kubenswrapper[4632]: I1201 06:45:32.909693 4632 generic.go:334] "Generic (PLEG): container finished" podID="39776e22-d730-44e7-b6a5-f7707cffb5af" containerID="1462cec6eb1043f6cc54a97b983c38da749fa3e26965c70414c27ffc423d82f2" exitCode=0 Dec 01 06:45:32 crc kubenswrapper[4632]: I1201 06:45:32.910065 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dgtqs" event={"ID":"39776e22-d730-44e7-b6a5-f7707cffb5af","Type":"ContainerDied","Data":"1462cec6eb1043f6cc54a97b983c38da749fa3e26965c70414c27ffc423d82f2"} Dec 01 06:45:32 crc kubenswrapper[4632]: I1201 06:45:32.910089 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dgtqs" event={"ID":"39776e22-d730-44e7-b6a5-f7707cffb5af","Type":"ContainerStarted","Data":"e547e3485d53b743dbc0979eda47ae44461b4083ccd1b7ef161103d6b8df98d6"} Dec 01 06:45:32 crc kubenswrapper[4632]: I1201 06:45:32.959858 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" podStartSLOduration=122.959845304 podStartE2EDuration="2m2.959845304s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:32.957810391 +0000 UTC m=+142.522823374" watchObservedRunningTime="2025-12-01 06:45:32.959845304 +0000 UTC m=+142.524858267" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.069908 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-ndjg6" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.154546 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dab5433a-456e-4006-a05d-a2f04ebe1330-secret-volume\") pod \"dab5433a-456e-4006-a05d-a2f04ebe1330\" (UID: \"dab5433a-456e-4006-a05d-a2f04ebe1330\") " Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.154610 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q89k4\" (UniqueName: \"kubernetes.io/projected/dab5433a-456e-4006-a05d-a2f04ebe1330-kube-api-access-q89k4\") pod \"dab5433a-456e-4006-a05d-a2f04ebe1330\" (UID: \"dab5433a-456e-4006-a05d-a2f04ebe1330\") " Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.154648 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dab5433a-456e-4006-a05d-a2f04ebe1330-config-volume\") pod \"dab5433a-456e-4006-a05d-a2f04ebe1330\" (UID: \"dab5433a-456e-4006-a05d-a2f04ebe1330\") " Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.156060 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dab5433a-456e-4006-a05d-a2f04ebe1330-config-volume" (OuterVolumeSpecName: "config-volume") pod "dab5433a-456e-4006-a05d-a2f04ebe1330" (UID: "dab5433a-456e-4006-a05d-a2f04ebe1330"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.171914 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dab5433a-456e-4006-a05d-a2f04ebe1330-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "dab5433a-456e-4006-a05d-a2f04ebe1330" (UID: "dab5433a-456e-4006-a05d-a2f04ebe1330"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.171972 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dab5433a-456e-4006-a05d-a2f04ebe1330-kube-api-access-q89k4" (OuterVolumeSpecName: "kube-api-access-q89k4") pod "dab5433a-456e-4006-a05d-a2f04ebe1330" (UID: "dab5433a-456e-4006-a05d-a2f04ebe1330"). InnerVolumeSpecName "kube-api-access-q89k4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.256857 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-lcqc9"] Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.256922 4632 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/dab5433a-456e-4006-a05d-a2f04ebe1330-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.257662 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q89k4\" (UniqueName: \"kubernetes.io/projected/dab5433a-456e-4006-a05d-a2f04ebe1330-kube-api-access-q89k4\") on node \"crc\" DevicePath \"\"" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.257677 4632 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/dab5433a-456e-4006-a05d-a2f04ebe1330-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 06:45:33 crc kubenswrapper[4632]: E1201 06:45:33.257864 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dab5433a-456e-4006-a05d-a2f04ebe1330" containerName="collect-profiles" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.257889 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="dab5433a-456e-4006-a05d-a2f04ebe1330" containerName="collect-profiles" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.257999 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="dab5433a-456e-4006-a05d-a2f04ebe1330" containerName="collect-profiles" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.258734 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lcqc9" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.260582 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.261222 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lcqc9"] Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.292535 4632 patch_prober.go:28] interesting pod/router-default-5444994796-j4hgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 06:45:33 crc kubenswrapper[4632]: [-]has-synced failed: reason withheld Dec 01 06:45:33 crc kubenswrapper[4632]: [+]process-running ok Dec 01 06:45:33 crc kubenswrapper[4632]: healthz check failed Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.292591 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-j4hgc" podUID="8906a9ad-be53-4475-8b99-c2895ff794fa" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.359026 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcc4ee60-b4e0-4d3f-8e08-76be1749a745-catalog-content\") pod \"redhat-marketplace-lcqc9\" (UID: \"fcc4ee60-b4e0-4d3f-8e08-76be1749a745\") " pod="openshift-marketplace/redhat-marketplace-lcqc9" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.359124 4632 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snq7r\" (UniqueName: \"kubernetes.io/projected/fcc4ee60-b4e0-4d3f-8e08-76be1749a745-kube-api-access-snq7r\") pod \"redhat-marketplace-lcqc9\" (UID: \"fcc4ee60-b4e0-4d3f-8e08-76be1749a745\") " pod="openshift-marketplace/redhat-marketplace-lcqc9" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.359193 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcc4ee60-b4e0-4d3f-8e08-76be1749a745-utilities\") pod \"redhat-marketplace-lcqc9\" (UID: \"fcc4ee60-b4e0-4d3f-8e08-76be1749a745\") " pod="openshift-marketplace/redhat-marketplace-lcqc9" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.460202 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcc4ee60-b4e0-4d3f-8e08-76be1749a745-utilities\") pod \"redhat-marketplace-lcqc9\" (UID: \"fcc4ee60-b4e0-4d3f-8e08-76be1749a745\") " pod="openshift-marketplace/redhat-marketplace-lcqc9" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.460535 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcc4ee60-b4e0-4d3f-8e08-76be1749a745-catalog-content\") pod \"redhat-marketplace-lcqc9\" (UID: \"fcc4ee60-b4e0-4d3f-8e08-76be1749a745\") " pod="openshift-marketplace/redhat-marketplace-lcqc9" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.460582 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snq7r\" (UniqueName: \"kubernetes.io/projected/fcc4ee60-b4e0-4d3f-8e08-76be1749a745-kube-api-access-snq7r\") pod \"redhat-marketplace-lcqc9\" (UID: \"fcc4ee60-b4e0-4d3f-8e08-76be1749a745\") " pod="openshift-marketplace/redhat-marketplace-lcqc9" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.460944 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcc4ee60-b4e0-4d3f-8e08-76be1749a745-utilities\") pod \"redhat-marketplace-lcqc9\" (UID: \"fcc4ee60-b4e0-4d3f-8e08-76be1749a745\") " pod="openshift-marketplace/redhat-marketplace-lcqc9" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.461003 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcc4ee60-b4e0-4d3f-8e08-76be1749a745-catalog-content\") pod \"redhat-marketplace-lcqc9\" (UID: \"fcc4ee60-b4e0-4d3f-8e08-76be1749a745\") " pod="openshift-marketplace/redhat-marketplace-lcqc9" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.475475 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snq7r\" (UniqueName: \"kubernetes.io/projected/fcc4ee60-b4e0-4d3f-8e08-76be1749a745-kube-api-access-snq7r\") pod \"redhat-marketplace-lcqc9\" (UID: \"fcc4ee60-b4e0-4d3f-8e08-76be1749a745\") " pod="openshift-marketplace/redhat-marketplace-lcqc9" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.584801 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lcqc9" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.649001 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qjftw"] Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.649876 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qjftw" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.657382 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qjftw"] Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.723164 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lcqc9"] Dec 01 06:45:33 crc kubenswrapper[4632]: W1201 06:45:33.729752 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfcc4ee60_b4e0_4d3f_8e08_76be1749a745.slice/crio-6141f7d035dc03d84bd497432da908f6098a628fcc929ce08218f1209000dfe7 WatchSource:0}: Error finding container 6141f7d035dc03d84bd497432da908f6098a628fcc929ce08218f1209000dfe7: Status 404 returned error can't find the container with id 6141f7d035dc03d84bd497432da908f6098a628fcc929ce08218f1209000dfe7 Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.763969 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb-catalog-content\") pod \"redhat-marketplace-qjftw\" (UID: \"ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb\") " pod="openshift-marketplace/redhat-marketplace-qjftw" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.764147 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzs5f\" (UniqueName: \"kubernetes.io/projected/ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb-kube-api-access-gzs5f\") pod \"redhat-marketplace-qjftw\" (UID: \"ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb\") " pod="openshift-marketplace/redhat-marketplace-qjftw" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.764230 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb-utilities\") pod \"redhat-marketplace-qjftw\" (UID: \"ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb\") " pod="openshift-marketplace/redhat-marketplace-qjftw" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.865827 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb-catalog-content\") pod \"redhat-marketplace-qjftw\" (UID: \"ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb\") " pod="openshift-marketplace/redhat-marketplace-qjftw" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.865909 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzs5f\" (UniqueName: \"kubernetes.io/projected/ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb-kube-api-access-gzs5f\") pod \"redhat-marketplace-qjftw\" (UID: \"ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb\") " pod="openshift-marketplace/redhat-marketplace-qjftw" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.866005 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb-utilities\") pod \"redhat-marketplace-qjftw\" (UID: \"ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb\") " pod="openshift-marketplace/redhat-marketplace-qjftw" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.866300 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb-catalog-content\") pod \"redhat-marketplace-qjftw\" (UID: \"ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb\") " pod="openshift-marketplace/redhat-marketplace-qjftw" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.866324 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb-utilities\") pod \"redhat-marketplace-qjftw\" (UID: \"ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb\") " pod="openshift-marketplace/redhat-marketplace-qjftw" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.883641 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzs5f\" (UniqueName: \"kubernetes.io/projected/ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb-kube-api-access-gzs5f\") pod \"redhat-marketplace-qjftw\" (UID: \"ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb\") " pod="openshift-marketplace/redhat-marketplace-qjftw" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.916260 4632 generic.go:334] "Generic (PLEG): container finished" podID="fcc4ee60-b4e0-4d3f-8e08-76be1749a745" containerID="13a2676f62c75ed99f7b35b59e5ed3094e8eae667277b26f6fd0ccc8fe3e6e1a" exitCode=0 Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.916328 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lcqc9" event={"ID":"fcc4ee60-b4e0-4d3f-8e08-76be1749a745","Type":"ContainerDied","Data":"13a2676f62c75ed99f7b35b59e5ed3094e8eae667277b26f6fd0ccc8fe3e6e1a"} Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.916381 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lcqc9" event={"ID":"fcc4ee60-b4e0-4d3f-8e08-76be1749a745","Type":"ContainerStarted","Data":"6141f7d035dc03d84bd497432da908f6098a628fcc929ce08218f1209000dfe7"} Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.918294 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-ndjg6" event={"ID":"dab5433a-456e-4006-a05d-a2f04ebe1330","Type":"ContainerDied","Data":"a2e11e788d47ec7f7782492ef3291ec3a74e68c976069768e7905a221d4068b3"} Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.918334 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a2e11e788d47ec7f7782492ef3291ec3a74e68c976069768e7905a221d4068b3" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.918624 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409525-ndjg6" Dec 01 06:45:33 crc kubenswrapper[4632]: I1201 06:45:33.969495 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qjftw" Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.075386 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.076130 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.083497 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.084373 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.085163 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.135844 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qjftw"] Dec 01 06:45:34 crc kubenswrapper[4632]: W1201 06:45:34.146923 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podddf1a2a6_d01c_45d0_b8e1_6e75690f8feb.slice/crio-89f94bb35a22498cb8d17c47eaf00937d10a4164841be85b6a8ff0df89de7f3f WatchSource:0}: Error finding container 89f94bb35a22498cb8d17c47eaf00937d10a4164841be85b6a8ff0df89de7f3f: Status 404 returned error can't find the container with id 89f94bb35a22498cb8d17c47eaf00937d10a4164841be85b6a8ff0df89de7f3f Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.171000 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2c9510b5-441a-4e44-bfcb-5feb7abc998a-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2c9510b5-441a-4e44-bfcb-5feb7abc998a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.171055 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2c9510b5-441a-4e44-bfcb-5feb7abc998a-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2c9510b5-441a-4e44-bfcb-5feb7abc998a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.249059 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-n24r7"] Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.250618 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-n24r7" Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.255653 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.255989 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-n24r7"] Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.272754 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2c9510b5-441a-4e44-bfcb-5feb7abc998a-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2c9510b5-441a-4e44-bfcb-5feb7abc998a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.272801 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2c9510b5-441a-4e44-bfcb-5feb7abc998a-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2c9510b5-441a-4e44-bfcb-5feb7abc998a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.272921 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2c9510b5-441a-4e44-bfcb-5feb7abc998a-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2c9510b5-441a-4e44-bfcb-5feb7abc998a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.287690 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2c9510b5-441a-4e44-bfcb-5feb7abc998a-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2c9510b5-441a-4e44-bfcb-5feb7abc998a\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.291973 4632 patch_prober.go:28] interesting pod/router-default-5444994796-j4hgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 01 06:45:34 crc kubenswrapper[4632]: [-]has-synced failed: reason withheld Dec 01 06:45:34 crc kubenswrapper[4632]: [+]process-running ok Dec 01 06:45:34 crc kubenswrapper[4632]: healthz check failed Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.292223 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-j4hgc" podUID="8906a9ad-be53-4475-8b99-c2895ff794fa" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.375285 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-295l2\" (UniqueName: \"kubernetes.io/projected/56a27481-d126-4d90-8082-75063f21c2ac-kube-api-access-295l2\") pod \"redhat-operators-n24r7\" (UID: \"56a27481-d126-4d90-8082-75063f21c2ac\") " pod="openshift-marketplace/redhat-operators-n24r7" Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.375378 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56a27481-d126-4d90-8082-75063f21c2ac-utilities\") pod 
\"redhat-operators-n24r7\" (UID: \"56a27481-d126-4d90-8082-75063f21c2ac\") " pod="openshift-marketplace/redhat-operators-n24r7" Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.375400 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56a27481-d126-4d90-8082-75063f21c2ac-catalog-content\") pod \"redhat-operators-n24r7\" (UID: \"56a27481-d126-4d90-8082-75063f21c2ac\") " pod="openshift-marketplace/redhat-operators-n24r7" Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.406872 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.476993 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-295l2\" (UniqueName: \"kubernetes.io/projected/56a27481-d126-4d90-8082-75063f21c2ac-kube-api-access-295l2\") pod \"redhat-operators-n24r7\" (UID: \"56a27481-d126-4d90-8082-75063f21c2ac\") " pod="openshift-marketplace/redhat-operators-n24r7" Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.477044 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56a27481-d126-4d90-8082-75063f21c2ac-utilities\") pod \"redhat-operators-n24r7\" (UID: \"56a27481-d126-4d90-8082-75063f21c2ac\") " pod="openshift-marketplace/redhat-operators-n24r7" Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.477063 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56a27481-d126-4d90-8082-75063f21c2ac-catalog-content\") pod \"redhat-operators-n24r7\" (UID: \"56a27481-d126-4d90-8082-75063f21c2ac\") " pod="openshift-marketplace/redhat-operators-n24r7" Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.477627 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56a27481-d126-4d90-8082-75063f21c2ac-utilities\") pod \"redhat-operators-n24r7\" (UID: \"56a27481-d126-4d90-8082-75063f21c2ac\") " pod="openshift-marketplace/redhat-operators-n24r7" Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.477643 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56a27481-d126-4d90-8082-75063f21c2ac-catalog-content\") pod \"redhat-operators-n24r7\" (UID: \"56a27481-d126-4d90-8082-75063f21c2ac\") " pod="openshift-marketplace/redhat-operators-n24r7" Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.492762 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-295l2\" (UniqueName: \"kubernetes.io/projected/56a27481-d126-4d90-8082-75063f21c2ac-kube-api-access-295l2\") pod \"redhat-operators-n24r7\" (UID: \"56a27481-d126-4d90-8082-75063f21c2ac\") " pod="openshift-marketplace/redhat-operators-n24r7" Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.572890 4632 util.go:30] "No sandbox for pod can be found. 
Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.605542 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.650396 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-t2hmz"]
Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.651872 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t2hmz"
Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.664634 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-t2hmz"]
Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.781285 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dac7bd07-63ed-44e6-9c05-cec09b9690e9-utilities\") pod \"redhat-operators-t2hmz\" (UID: \"dac7bd07-63ed-44e6-9c05-cec09b9690e9\") " pod="openshift-marketplace/redhat-operators-t2hmz"
Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.781656 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtzkz\" (UniqueName: \"kubernetes.io/projected/dac7bd07-63ed-44e6-9c05-cec09b9690e9-kube-api-access-qtzkz\") pod \"redhat-operators-t2hmz\" (UID: \"dac7bd07-63ed-44e6-9c05-cec09b9690e9\") " pod="openshift-marketplace/redhat-operators-t2hmz"
Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.781702 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dac7bd07-63ed-44e6-9c05-cec09b9690e9-catalog-content\") pod \"redhat-operators-t2hmz\" (UID: \"dac7bd07-63ed-44e6-9c05-cec09b9690e9\") " pod="openshift-marketplace/redhat-operators-t2hmz"
Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.839324 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-n24r7"]
Dec 01 06:45:34 crc kubenswrapper[4632]: W1201 06:45:34.843448 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56a27481_d126_4d90_8082_75063f21c2ac.slice/crio-d0b321282cfa94a0ab336f9ea85fa4cafdf52d2551ddb3300a29c875a9a92e58 WatchSource:0}: Error finding container d0b321282cfa94a0ab336f9ea85fa4cafdf52d2551ddb3300a29c875a9a92e58: Status 404 returned error can't find the container with id d0b321282cfa94a0ab336f9ea85fa4cafdf52d2551ddb3300a29c875a9a92e58
Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.882981 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtzkz\" (UniqueName: \"kubernetes.io/projected/dac7bd07-63ed-44e6-9c05-cec09b9690e9-kube-api-access-qtzkz\") pod \"redhat-operators-t2hmz\" (UID: \"dac7bd07-63ed-44e6-9c05-cec09b9690e9\") " pod="openshift-marketplace/redhat-operators-t2hmz"
Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.883095 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dac7bd07-63ed-44e6-9c05-cec09b9690e9-catalog-content\") pod \"redhat-operators-t2hmz\" (UID: \"dac7bd07-63ed-44e6-9c05-cec09b9690e9\") " pod="openshift-marketplace/redhat-operators-t2hmz"
Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.883175 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dac7bd07-63ed-44e6-9c05-cec09b9690e9-utilities\") pod \"redhat-operators-t2hmz\" (UID: \"dac7bd07-63ed-44e6-9c05-cec09b9690e9\") " pod="openshift-marketplace/redhat-operators-t2hmz"
Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.883639 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dac7bd07-63ed-44e6-9c05-cec09b9690e9-utilities\") pod \"redhat-operators-t2hmz\" (UID: \"dac7bd07-63ed-44e6-9c05-cec09b9690e9\") " pod="openshift-marketplace/redhat-operators-t2hmz"
Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.884131 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dac7bd07-63ed-44e6-9c05-cec09b9690e9-catalog-content\") pod \"redhat-operators-t2hmz\" (UID: \"dac7bd07-63ed-44e6-9c05-cec09b9690e9\") " pod="openshift-marketplace/redhat-operators-t2hmz"
Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.898406 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtzkz\" (UniqueName: \"kubernetes.io/projected/dac7bd07-63ed-44e6-9c05-cec09b9690e9-kube-api-access-qtzkz\") pod \"redhat-operators-t2hmz\" (UID: \"dac7bd07-63ed-44e6-9c05-cec09b9690e9\") " pod="openshift-marketplace/redhat-operators-t2hmz"
Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.931693 4632 generic.go:334] "Generic (PLEG): container finished" podID="ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb" containerID="33153bd5a5164f671d8374b73cd6d70e2aa0daf2000a89947493ff31c0a48ad5" exitCode=0
Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.931757 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qjftw" event={"ID":"ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb","Type":"ContainerDied","Data":"33153bd5a5164f671d8374b73cd6d70e2aa0daf2000a89947493ff31c0a48ad5"}
Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.931800 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qjftw" event={"ID":"ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb","Type":"ContainerStarted","Data":"89f94bb35a22498cb8d17c47eaf00937d10a4164841be85b6a8ff0df89de7f3f"}
Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.937759 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2c9510b5-441a-4e44-bfcb-5feb7abc998a","Type":"ContainerStarted","Data":"b0bc0ab0ac60c6b3a7ef030f07d424dc34e57528dadd4329278856f035f82926"}
Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.942776 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n24r7" event={"ID":"56a27481-d126-4d90-8082-75063f21c2ac","Type":"ContainerStarted","Data":"d0b321282cfa94a0ab336f9ea85fa4cafdf52d2551ddb3300a29c875a9a92e58"}
Dec 01 06:45:34 crc kubenswrapper[4632]: I1201 06:45:34.973180 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t2hmz"
Dec 01 06:45:35 crc kubenswrapper[4632]: I1201 06:45:35.228032 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-t2hmz"]
Dec 01 06:45:35 crc kubenswrapper[4632]: I1201 06:45:35.293454 4632 patch_prober.go:28] interesting pod/router-default-5444994796-j4hgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 01 06:45:35 crc kubenswrapper[4632]: [-]has-synced failed: reason withheld
Dec 01 06:45:35 crc kubenswrapper[4632]: [+]process-running ok
Dec 01 06:45:35 crc kubenswrapper[4632]: healthz check failed
Dec 01 06:45:35 crc kubenswrapper[4632]: I1201 06:45:35.293495 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-j4hgc" podUID="8906a9ad-be53-4475-8b99-c2895ff794fa" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 01 06:45:35 crc kubenswrapper[4632]: I1201 06:45:35.949064 4632 generic.go:334] "Generic (PLEG): container finished" podID="dac7bd07-63ed-44e6-9c05-cec09b9690e9" containerID="fa0ed86ac29b488109b9d7c1c9871efccf963b76da1eca112108348c3d0826e4" exitCode=0
Dec 01 06:45:35 crc kubenswrapper[4632]: I1201 06:45:35.949176 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t2hmz" event={"ID":"dac7bd07-63ed-44e6-9c05-cec09b9690e9","Type":"ContainerDied","Data":"fa0ed86ac29b488109b9d7c1c9871efccf963b76da1eca112108348c3d0826e4"}
Dec 01 06:45:35 crc kubenswrapper[4632]: I1201 06:45:35.949490 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t2hmz" event={"ID":"dac7bd07-63ed-44e6-9c05-cec09b9690e9","Type":"ContainerStarted","Data":"7fc035384b2076599d9648431ef04715184ac1691d338b6377335aadad0bc059"}
Dec 01 06:45:35 crc kubenswrapper[4632]: I1201 06:45:35.962846 4632 generic.go:334] "Generic (PLEG): container finished" podID="2c9510b5-441a-4e44-bfcb-5feb7abc998a" containerID="78e9dc442d13b06fb5af97f1de6f749a16d20b33e3af1e683affbbed9731df35" exitCode=0
Dec 01 06:45:35 crc kubenswrapper[4632]: I1201 06:45:35.962999 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2c9510b5-441a-4e44-bfcb-5feb7abc998a","Type":"ContainerDied","Data":"78e9dc442d13b06fb5af97f1de6f749a16d20b33e3af1e683affbbed9731df35"}
Dec 01 06:45:35 crc kubenswrapper[4632]: I1201 06:45:35.966331 4632 generic.go:334] "Generic (PLEG): container finished" podID="56a27481-d126-4d90-8082-75063f21c2ac" containerID="cbfa4473aeb3bd7ba699d22724a4edba773199ba66572710099dff7d03e14797" exitCode=0
Dec 01 06:45:35 crc kubenswrapper[4632]: I1201 06:45:35.966398 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n24r7" event={"ID":"56a27481-d126-4d90-8082-75063f21c2ac","Type":"ContainerDied","Data":"cbfa4473aeb3bd7ba699d22724a4edba773199ba66572710099dff7d03e14797"}
Dec 01 06:45:36 crc kubenswrapper[4632]: I1201 06:45:36.073047 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-kxph4"
Dec 01 06:45:36 crc kubenswrapper[4632]: I1201 06:45:36.077716 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-kxph4"
Dec 01 06:45:36 crc kubenswrapper[4632]: I1201 06:45:36.291838 4632 patch_prober.go:28] interesting pod/router-default-5444994796-j4hgc container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 01 06:45:36 crc kubenswrapper[4632]: [-]has-synced failed: reason withheld
Dec 01 06:45:36 crc kubenswrapper[4632]: [+]process-running ok
Dec 01 06:45:36 crc kubenswrapper[4632]: healthz check failed
Dec 01 06:45:36 crc kubenswrapper[4632]: I1201 06:45:36.291898 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-j4hgc" podUID="8906a9ad-be53-4475-8b99-c2895ff794fa" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 01 06:45:36 crc kubenswrapper[4632]: I1201 06:45:36.811868 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 01 06:45:36 crc kubenswrapper[4632]: I1201 06:45:36.811921 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:45:36 crc kubenswrapper[4632]: I1201 06:45:36.811975 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:45:36 crc kubenswrapper[4632]: I1201 06:45:36.812550 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:45:36 crc kubenswrapper[4632]: I1201 06:45:36.812737 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 01 06:45:36 crc kubenswrapper[4632]: I1201 06:45:36.813755 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-wchfw"
Dec 01 06:45:36 crc kubenswrapper[4632]: I1201 06:45:36.818347 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:45:36 crc kubenswrapper[4632]: I1201 06:45:36.821735 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:45:36 crc kubenswrapper[4632]: I1201 06:45:36.823126 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:45:36 crc kubenswrapper[4632]: I1201 06:45:36.829378 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-52x8r" Dec 01 06:45:36 crc kubenswrapper[4632]: I1201 06:45:36.830726 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-52x8r" Dec 01 06:45:36 crc kubenswrapper[4632]: I1201 06:45:36.838617 4632 patch_prober.go:28] interesting pod/console-f9d7485db-52x8r container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.26:8443/health\": dial tcp 10.217.0.26:8443: connect: connection refused" start-of-body= Dec 01 06:45:36 crc kubenswrapper[4632]: I1201 06:45:36.838670 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-52x8r" podUID="ba580937-8b3b-404d-a9fe-3d4e014ce6b1" containerName="console" probeResult="failure" output="Get \"https://10.217.0.26:8443/health\": dial tcp 10.217.0.26:8443: connect: connection refused" Dec 01 06:45:36 crc kubenswrapper[4632]: I1201 06:45:36.861318 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:45:36 crc kubenswrapper[4632]: I1201 06:45:36.871954 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 01 06:45:36 crc kubenswrapper[4632]: I1201 06:45:36.876507 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 01 06:45:37 crc kubenswrapper[4632]: I1201 06:45:37.132839 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 01 06:45:37 crc kubenswrapper[4632]: I1201 06:45:37.133520 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 06:45:37 crc kubenswrapper[4632]: I1201 06:45:37.137811 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 01 06:45:37 crc kubenswrapper[4632]: I1201 06:45:37.139698 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 01 06:45:37 crc kubenswrapper[4632]: I1201 06:45:37.140225 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 01 06:45:37 crc kubenswrapper[4632]: I1201 06:45:37.217866 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/55ebde61-9ce4-4769-9326-fb97d8bf6648-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"55ebde61-9ce4-4769-9326-fb97d8bf6648\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 06:45:37 crc kubenswrapper[4632]: I1201 06:45:37.217918 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/55ebde61-9ce4-4769-9326-fb97d8bf6648-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"55ebde61-9ce4-4769-9326-fb97d8bf6648\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 06:45:37 crc kubenswrapper[4632]: I1201 06:45:37.289871 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-j4hgc" Dec 01 06:45:37 crc kubenswrapper[4632]: I1201 06:45:37.292510 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-j4hgc" Dec 01 06:45:37 crc kubenswrapper[4632]: I1201 06:45:37.318793 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/55ebde61-9ce4-4769-9326-fb97d8bf6648-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"55ebde61-9ce4-4769-9326-fb97d8bf6648\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 06:45:37 crc kubenswrapper[4632]: I1201 06:45:37.318836 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/55ebde61-9ce4-4769-9326-fb97d8bf6648-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"55ebde61-9ce4-4769-9326-fb97d8bf6648\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 06:45:37 crc kubenswrapper[4632]: I1201 06:45:37.319045 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/55ebde61-9ce4-4769-9326-fb97d8bf6648-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"55ebde61-9ce4-4769-9326-fb97d8bf6648\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 06:45:37 crc kubenswrapper[4632]: I1201 06:45:37.334375 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/55ebde61-9ce4-4769-9326-fb97d8bf6648-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"55ebde61-9ce4-4769-9326-fb97d8bf6648\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 06:45:37 crc kubenswrapper[4632]: I1201 06:45:37.454980 4632 util.go:30] "No sandbox for pod can be found. 
Dec 01 06:45:38 crc kubenswrapper[4632]: I1201 06:45:38.005803 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-j4hgc"
Dec 01 06:45:39 crc kubenswrapper[4632]: I1201 06:45:39.320467 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-jwp2r"
Dec 01 06:45:40 crc kubenswrapper[4632]: I1201 06:45:40.385717 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 01 06:45:40 crc kubenswrapper[4632]: I1201 06:45:40.473022 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2c9510b5-441a-4e44-bfcb-5feb7abc998a-kube-api-access\") pod \"2c9510b5-441a-4e44-bfcb-5feb7abc998a\" (UID: \"2c9510b5-441a-4e44-bfcb-5feb7abc998a\") "
Dec 01 06:45:40 crc kubenswrapper[4632]: I1201 06:45:40.473195 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2c9510b5-441a-4e44-bfcb-5feb7abc998a-kubelet-dir\") pod \"2c9510b5-441a-4e44-bfcb-5feb7abc998a\" (UID: \"2c9510b5-441a-4e44-bfcb-5feb7abc998a\") "
Dec 01 06:45:40 crc kubenswrapper[4632]: I1201 06:45:40.473287 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2c9510b5-441a-4e44-bfcb-5feb7abc998a-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "2c9510b5-441a-4e44-bfcb-5feb7abc998a" (UID: "2c9510b5-441a-4e44-bfcb-5feb7abc998a"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 06:45:40 crc kubenswrapper[4632]: I1201 06:45:40.473694 4632 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2c9510b5-441a-4e44-bfcb-5feb7abc998a-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 01 06:45:40 crc kubenswrapper[4632]: I1201 06:45:40.477399 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c9510b5-441a-4e44-bfcb-5feb7abc998a-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "2c9510b5-441a-4e44-bfcb-5feb7abc998a" (UID: "2c9510b5-441a-4e44-bfcb-5feb7abc998a"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:45:40 crc kubenswrapper[4632]: I1201 06:45:40.575265 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2c9510b5-441a-4e44-bfcb-5feb7abc998a-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 01 06:45:41 crc kubenswrapper[4632]: I1201 06:45:41.018094 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"2c9510b5-441a-4e44-bfcb-5feb7abc998a","Type":"ContainerDied","Data":"b0bc0ab0ac60c6b3a7ef030f07d424dc34e57528dadd4329278856f035f82926"}
Dec 01 06:45:41 crc kubenswrapper[4632]: I1201 06:45:41.018559 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b0bc0ab0ac60c6b3a7ef030f07d424dc34e57528dadd4329278856f035f82926"
Dec 01 06:45:41 crc kubenswrapper[4632]: I1201 06:45:41.018194 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 01 06:45:41 crc kubenswrapper[4632]: I1201 06:45:41.027729 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd"
Dec 01 06:45:41 crc kubenswrapper[4632]: I1201 06:45:41.252707 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 01 06:45:41 crc kubenswrapper[4632]: W1201 06:45:41.279189 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod55ebde61_9ce4_4769_9326_fb97d8bf6648.slice/crio-a1773a642cdeb31595995d60c84595b80c9fd95b478a31aa99ac73249a67541b WatchSource:0}: Error finding container a1773a642cdeb31595995d60c84595b80c9fd95b478a31aa99ac73249a67541b: Status 404 returned error can't find the container with id a1773a642cdeb31595995d60c84595b80c9fd95b478a31aa99ac73249a67541b
Dec 01 06:45:41 crc kubenswrapper[4632]: W1201 06:45:41.286345 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-83bd53d722cc979b04a3a2ffd759800c92bca0fcd7696eea7e951ebb7426f6a6 WatchSource:0}: Error finding container 83bd53d722cc979b04a3a2ffd759800c92bca0fcd7696eea7e951ebb7426f6a6: Status 404 returned error can't find the container with id 83bd53d722cc979b04a3a2ffd759800c92bca0fcd7696eea7e951ebb7426f6a6
Dec 01 06:45:41 crc kubenswrapper[4632]: W1201 06:45:41.291093 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-48e4ed58827964c16fca5570e0ec13eb87b4724f646f11078e06582d95e08ef4 WatchSource:0}: Error finding container 48e4ed58827964c16fca5570e0ec13eb87b4724f646f11078e06582d95e08ef4: Status 404 returned error can't find the container with id 48e4ed58827964c16fca5570e0ec13eb87b4724f646f11078e06582d95e08ef4
Dec 01 06:45:41 crc kubenswrapper[4632]: W1201 06:45:41.347724 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-2277ac389731ac02653f8371cebaa4d59321eb6040b5d3dfee6277bd2b402015 WatchSource:0}: Error finding container 2277ac389731ac02653f8371cebaa4d59321eb6040b5d3dfee6277bd2b402015: Status 404 returned error can't find the container with id 2277ac389731ac02653f8371cebaa4d59321eb6040b5d3dfee6277bd2b402015
Dec 01 06:45:42 crc kubenswrapper[4632]: I1201 06:45:42.024911 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"685da66242fca30c1707691c3192394d4880f51f8acaa6c8680423e146f48450"}
Dec 01 06:45:42 crc kubenswrapper[4632]: I1201 06:45:42.025447 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"83bd53d722cc979b04a3a2ffd759800c92bca0fcd7696eea7e951ebb7426f6a6"}
Dec 01 06:45:42 crc kubenswrapper[4632]: I1201 06:45:42.025685 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 01 06:45:42 crc kubenswrapper[4632]: I1201 06:45:42.028785 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"134dae4a9bb77a5bf2afb177a90bc5ca09cde286a8cdb453fe0360bf4e7e7399"}
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"134dae4a9bb77a5bf2afb177a90bc5ca09cde286a8cdb453fe0360bf4e7e7399"} Dec 01 06:45:42 crc kubenswrapper[4632]: I1201 06:45:42.028840 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"48e4ed58827964c16fca5570e0ec13eb87b4724f646f11078e06582d95e08ef4"} Dec 01 06:45:42 crc kubenswrapper[4632]: I1201 06:45:42.030625 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"704a7d759189cb1c9a92749cbc76d587abbd9f6ecb3feb8240fd6fe1dc1a7967"} Dec 01 06:45:42 crc kubenswrapper[4632]: I1201 06:45:42.030683 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"2277ac389731ac02653f8371cebaa4d59321eb6040b5d3dfee6277bd2b402015"} Dec 01 06:45:42 crc kubenswrapper[4632]: I1201 06:45:42.034014 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"55ebde61-9ce4-4769-9326-fb97d8bf6648","Type":"ContainerStarted","Data":"21233ddb7460c58dc6c79a2d5d6daf6888262f3cd68533146d62021955669302"} Dec 01 06:45:42 crc kubenswrapper[4632]: I1201 06:45:42.034042 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"55ebde61-9ce4-4769-9326-fb97d8bf6648","Type":"ContainerStarted","Data":"a1773a642cdeb31595995d60c84595b80c9fd95b478a31aa99ac73249a67541b"} Dec 01 06:45:42 crc kubenswrapper[4632]: I1201 06:45:42.060057 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=5.060039704 podStartE2EDuration="5.060039704s" podCreationTimestamp="2025-12-01 06:45:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:42.056598326 +0000 UTC m=+151.621611299" watchObservedRunningTime="2025-12-01 06:45:42.060039704 +0000 UTC m=+151.625052666" Dec 01 06:45:43 crc kubenswrapper[4632]: I1201 06:45:43.049624 4632 generic.go:334] "Generic (PLEG): container finished" podID="55ebde61-9ce4-4769-9326-fb97d8bf6648" containerID="21233ddb7460c58dc6c79a2d5d6daf6888262f3cd68533146d62021955669302" exitCode=0 Dec 01 06:45:43 crc kubenswrapper[4632]: I1201 06:45:43.049686 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"55ebde61-9ce4-4769-9326-fb97d8bf6648","Type":"ContainerDied","Data":"21233ddb7460c58dc6c79a2d5d6daf6888262f3cd68533146d62021955669302"} Dec 01 06:45:45 crc kubenswrapper[4632]: I1201 06:45:45.884403 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 01 06:45:45 crc kubenswrapper[4632]: I1201 06:45:45.960773 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/55ebde61-9ce4-4769-9326-fb97d8bf6648-kube-api-access\") pod \"55ebde61-9ce4-4769-9326-fb97d8bf6648\" (UID: \"55ebde61-9ce4-4769-9326-fb97d8bf6648\") " Dec 01 06:45:45 crc kubenswrapper[4632]: I1201 06:45:45.960901 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/55ebde61-9ce4-4769-9326-fb97d8bf6648-kubelet-dir\") pod \"55ebde61-9ce4-4769-9326-fb97d8bf6648\" (UID: \"55ebde61-9ce4-4769-9326-fb97d8bf6648\") " Dec 01 06:45:45 crc kubenswrapper[4632]: I1201 06:45:45.961031 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/55ebde61-9ce4-4769-9326-fb97d8bf6648-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "55ebde61-9ce4-4769-9326-fb97d8bf6648" (UID: "55ebde61-9ce4-4769-9326-fb97d8bf6648"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:45:45 crc kubenswrapper[4632]: I1201 06:45:45.961268 4632 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/55ebde61-9ce4-4769-9326-fb97d8bf6648-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 01 06:45:45 crc kubenswrapper[4632]: I1201 06:45:45.966507 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55ebde61-9ce4-4769-9326-fb97d8bf6648-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "55ebde61-9ce4-4769-9326-fb97d8bf6648" (UID: "55ebde61-9ce4-4769-9326-fb97d8bf6648"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:45:46 crc kubenswrapper[4632]: I1201 06:45:46.062985 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/55ebde61-9ce4-4769-9326-fb97d8bf6648-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 06:45:46 crc kubenswrapper[4632]: I1201 06:45:46.067921 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"55ebde61-9ce4-4769-9326-fb97d8bf6648","Type":"ContainerDied","Data":"a1773a642cdeb31595995d60c84595b80c9fd95b478a31aa99ac73249a67541b"} Dec 01 06:45:46 crc kubenswrapper[4632]: I1201 06:45:46.067958 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a1773a642cdeb31595995d60c84595b80c9fd95b478a31aa99ac73249a67541b" Dec 01 06:45:46 crc kubenswrapper[4632]: I1201 06:45:46.068009 4632 util.go:48] "No ready sandbox for pod can be found. 
Dec 01 06:45:46 crc kubenswrapper[4632]: I1201 06:45:46.834283 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-52x8r"
Dec 01 06:45:46 crc kubenswrapper[4632]: I1201 06:45:46.838056 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-52x8r"
Dec 01 06:45:49 crc kubenswrapper[4632]: I1201 06:45:49.497755 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 06:45:49 crc kubenswrapper[4632]: I1201 06:45:49.498196 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 06:45:51 crc kubenswrapper[4632]: I1201 06:45:51.383226 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-w2js2"
Dec 01 06:45:51 crc kubenswrapper[4632]: I1201 06:45:51.732432 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/559abf1e-dc19-40e9-b75b-9a327d661dc0-metrics-certs\") pod \"network-metrics-daemon-nqqbv\" (UID: \"559abf1e-dc19-40e9-b75b-9a327d661dc0\") " pod="openshift-multus/network-metrics-daemon-nqqbv"
Dec 01 06:45:51 crc kubenswrapper[4632]: I1201 06:45:51.737593 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/559abf1e-dc19-40e9-b75b-9a327d661dc0-metrics-certs\") pod \"network-metrics-daemon-nqqbv\" (UID: \"559abf1e-dc19-40e9-b75b-9a327d661dc0\") " pod="openshift-multus/network-metrics-daemon-nqqbv"
Dec 01 06:45:51 crc kubenswrapper[4632]: I1201 06:45:51.866192 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nqqbv"
Dec 01 06:45:52 crc kubenswrapper[4632]: I1201 06:45:52.504014 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-nqqbv"]
Dec 01 06:45:53 crc kubenswrapper[4632]: I1201 06:45:53.103948 4632 generic.go:334] "Generic (PLEG): container finished" podID="dac7bd07-63ed-44e6-9c05-cec09b9690e9" containerID="0cdea75a82bd770782d4f0152a270a38783d17db0ac4e740d2997c741e71dc3a" exitCode=0
Dec 01 06:45:53 crc kubenswrapper[4632]: I1201 06:45:53.104174 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t2hmz" event={"ID":"dac7bd07-63ed-44e6-9c05-cec09b9690e9","Type":"ContainerDied","Data":"0cdea75a82bd770782d4f0152a270a38783d17db0ac4e740d2997c741e71dc3a"}
Dec 01 06:45:53 crc kubenswrapper[4632]: I1201 06:45:53.105792 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-nqqbv" event={"ID":"559abf1e-dc19-40e9-b75b-9a327d661dc0","Type":"ContainerStarted","Data":"4801a951057515da5b9127c2bccfdca5add71f935f6c61deab378e3a7a1ea85a"}
Dec 01 06:45:53 crc kubenswrapper[4632]: I1201 06:45:53.105820 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-nqqbv" event={"ID":"559abf1e-dc19-40e9-b75b-9a327d661dc0","Type":"ContainerStarted","Data":"054f7365f1d88ab1190871cc004443faf278c06dd3a2bdedc2cc3e1fdf29169b"}
Dec 01 06:45:53 crc kubenswrapper[4632]: I1201 06:45:53.105832 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-nqqbv" event={"ID":"559abf1e-dc19-40e9-b75b-9a327d661dc0","Type":"ContainerStarted","Data":"c1321bf6dc476decb1e6184232a2f307ca4a83a1a67eef0818f47f59f9abe4ea"}
Dec 01 06:45:53 crc kubenswrapper[4632]: I1201 06:45:53.107743 4632 generic.go:334] "Generic (PLEG): container finished" podID="fcc4ee60-b4e0-4d3f-8e08-76be1749a745" containerID="e6282a5c62e50bfe716fe3bca430b9d1e8a3fdf7801113d8b2017808603e1db3" exitCode=0
Dec 01 06:45:53 crc kubenswrapper[4632]: I1201 06:45:53.107785 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lcqc9" event={"ID":"fcc4ee60-b4e0-4d3f-8e08-76be1749a745","Type":"ContainerDied","Data":"e6282a5c62e50bfe716fe3bca430b9d1e8a3fdf7801113d8b2017808603e1db3"}
Dec 01 06:45:53 crc kubenswrapper[4632]: I1201 06:45:53.111590 4632 generic.go:334] "Generic (PLEG): container finished" podID="56a27481-d126-4d90-8082-75063f21c2ac" containerID="f2eaa91c22e8af9c351ecb29ae618e55e7b1dcbcbb9ebf7abc47b83ef5e93761" exitCode=0
Dec 01 06:45:53 crc kubenswrapper[4632]: I1201 06:45:53.111627 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n24r7" event={"ID":"56a27481-d126-4d90-8082-75063f21c2ac","Type":"ContainerDied","Data":"f2eaa91c22e8af9c351ecb29ae618e55e7b1dcbcbb9ebf7abc47b83ef5e93761"}
Dec 01 06:45:53 crc kubenswrapper[4632]: I1201 06:45:53.113670 4632 generic.go:334] "Generic (PLEG): container finished" podID="84310075-a3e6-46ff-906c-372ee393d197" containerID="c7e34f6d46f7de2503ac3b80d0eb0b3cfb24174f91fa5352efaec7ed9881b4e5" exitCode=0
Dec 01 06:45:53 crc kubenswrapper[4632]: I1201 06:45:53.113769 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5w5vx" event={"ID":"84310075-a3e6-46ff-906c-372ee393d197","Type":"ContainerDied","Data":"c7e34f6d46f7de2503ac3b80d0eb0b3cfb24174f91fa5352efaec7ed9881b4e5"}
Dec 01 06:45:53 crc kubenswrapper[4632]: I1201 06:45:53.115712 4632 generic.go:334] "Generic (PLEG): container finished" podID="9b173a24-6c77-40ce-8d2e-daa317c2972c" containerID="54b7d05c1830822d0fe35b40978c5c05ee13021a47bd2151140fd958a10a7209" exitCode=0
Dec 01 06:45:53 crc kubenswrapper[4632]: I1201 06:45:53.115776 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h6xj5" event={"ID":"9b173a24-6c77-40ce-8d2e-daa317c2972c","Type":"ContainerDied","Data":"54b7d05c1830822d0fe35b40978c5c05ee13021a47bd2151140fd958a10a7209"}
Dec 01 06:45:53 crc kubenswrapper[4632]: I1201 06:45:53.121616 4632 generic.go:334] "Generic (PLEG): container finished" podID="39776e22-d730-44e7-b6a5-f7707cffb5af" containerID="ad57f7436bf8cc3afa784a7028da0db5f1431c8031c6c13ed24a36f6d4cb56ea" exitCode=0
Dec 01 06:45:53 crc kubenswrapper[4632]: I1201 06:45:53.121751 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dgtqs" event={"ID":"39776e22-d730-44e7-b6a5-f7707cffb5af","Type":"ContainerDied","Data":"ad57f7436bf8cc3afa784a7028da0db5f1431c8031c6c13ed24a36f6d4cb56ea"}
Dec 01 06:45:53 crc kubenswrapper[4632]: I1201 06:45:53.126570 4632 generic.go:334] "Generic (PLEG): container finished" podID="ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb" containerID="33633f14a33f2a3907762373bab91b8044098b37c756f9579f4eeb7d293dc6ad" exitCode=0
Dec 01 06:45:53 crc kubenswrapper[4632]: I1201 06:45:53.126633 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qjftw" event={"ID":"ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb","Type":"ContainerDied","Data":"33633f14a33f2a3907762373bab91b8044098b37c756f9579f4eeb7d293dc6ad"}
Dec 01 06:45:53 crc kubenswrapper[4632]: I1201 06:45:53.130448 4632 generic.go:334] "Generic (PLEG): container finished" podID="5cba9c81-c125-4984-9e0f-0783b49b8bf6" containerID="5ea7da982745655437aecb7db13e0da8aefdde13dfb3746ff701e8f6baa15bac" exitCode=0
Dec 01 06:45:53 crc kubenswrapper[4632]: I1201 06:45:53.130480 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2cmb4" event={"ID":"5cba9c81-c125-4984-9e0f-0783b49b8bf6","Type":"ContainerDied","Data":"5ea7da982745655437aecb7db13e0da8aefdde13dfb3746ff701e8f6baa15bac"}
Dec 01 06:45:53 crc kubenswrapper[4632]: I1201 06:45:53.139110 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-nqqbv" podStartSLOduration=143.139098746 podStartE2EDuration="2m23.139098746s" podCreationTimestamp="2025-12-01 06:43:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:45:53.137576502 +0000 UTC m=+162.702589475" watchObservedRunningTime="2025-12-01 06:45:53.139098746 +0000 UTC m=+162.704111719"
Dec 01 06:45:54 crc kubenswrapper[4632]: I1201 06:45:54.137484 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t2hmz" event={"ID":"dac7bd07-63ed-44e6-9c05-cec09b9690e9","Type":"ContainerStarted","Data":"bd4bda7402f4caca3bb1fb41a416fc23fbc0cf417a266016e339a1ed0f67a5a7"}
Dec 01 06:45:54 crc kubenswrapper[4632]: I1201 06:45:54.140178 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h6xj5" event={"ID":"9b173a24-6c77-40ce-8d2e-daa317c2972c","Type":"ContainerStarted","Data":"3e2be8a76ca084a670ff9c557226d3f6aa5b5556b64d5a1598fe55bbe2f5b7b7"}
Dec 01 06:45:54 crc kubenswrapper[4632]: I1201 06:45:54.141903 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dgtqs" event={"ID":"39776e22-d730-44e7-b6a5-f7707cffb5af","Type":"ContainerStarted","Data":"6785e0cf52a2c5ce9f90a7151b9c53d1797001a06ebe8980efdb8ea4b5f9d428"}
Dec 01 06:45:54 crc kubenswrapper[4632]: I1201 06:45:54.144181 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qjftw" event={"ID":"ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb","Type":"ContainerStarted","Data":"dee328ae391c80b490e0f36faae3ad994211b0e4447a3adb652f43e38d41e308"}
Dec 01 06:45:54 crc kubenswrapper[4632]: I1201 06:45:54.145935 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2cmb4" event={"ID":"5cba9c81-c125-4984-9e0f-0783b49b8bf6","Type":"ContainerStarted","Data":"6d690e0ee875075bdf7e9cdeedfb973df35b13547ca078804c716e75c23c93fc"}
Dec 01 06:45:54 crc kubenswrapper[4632]: I1201 06:45:54.147597 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n24r7" event={"ID":"56a27481-d126-4d90-8082-75063f21c2ac","Type":"ContainerStarted","Data":"6f1e6fb0e6acd9eb8c3484d67d1e8586bdd1a93dda406114151376afe59bc131"}
Dec 01 06:45:54 crc kubenswrapper[4632]: I1201 06:45:54.149795 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lcqc9" event={"ID":"fcc4ee60-b4e0-4d3f-8e08-76be1749a745","Type":"ContainerStarted","Data":"658547aa283e0649b842db2722b60cfa4e9474f7e79630c6f996b28ea2c394d5"}
Dec 01 06:45:54 crc kubenswrapper[4632]: I1201 06:45:54.151805 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5w5vx" event={"ID":"84310075-a3e6-46ff-906c-372ee393d197","Type":"ContainerStarted","Data":"e249a012deb26804b986d47d639a3d07d9605aaeec3d0b334dac4ba13884dafe"}
Dec 01 06:45:54 crc kubenswrapper[4632]: I1201 06:45:54.159561 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-t2hmz" podStartSLOduration=2.348687408 podStartE2EDuration="20.159546094s" podCreationTimestamp="2025-12-01 06:45:34 +0000 UTC" firstStartedPulling="2025-12-01 06:45:35.951796778 +0000 UTC m=+145.516809751" lastFinishedPulling="2025-12-01 06:45:53.762655464 +0000 UTC m=+163.327668437" observedRunningTime="2025-12-01 06:45:54.157976481 +0000 UTC m=+163.722989474" watchObservedRunningTime="2025-12-01 06:45:54.159546094 +0000 UTC m=+163.724559068"
Dec 01 06:45:54 crc kubenswrapper[4632]: I1201 06:45:54.184058 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-dgtqs" podStartSLOduration=2.485036088 podStartE2EDuration="23.18404351s" podCreationTimestamp="2025-12-01 06:45:31 +0000 UTC" firstStartedPulling="2025-12-01 06:45:32.912865177 +0000 UTC m=+142.477878149" lastFinishedPulling="2025-12-01 06:45:53.611872609 +0000 UTC m=+163.176885571" observedRunningTime="2025-12-01 06:45:54.18257104 +0000 UTC m=+163.747584013" watchObservedRunningTime="2025-12-01 06:45:54.18404351 +0000 UTC m=+163.749056483"
Dec 01 06:45:54 crc kubenswrapper[4632]: I1201 06:45:54.212704 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qjftw" podStartSLOduration=2.36795614 podStartE2EDuration="21.212687304s" podCreationTimestamp="2025-12-01 06:45:33 +0000 UTC" firstStartedPulling="2025-12-01 06:45:34.933435298 +0000 UTC m=+144.498448270" lastFinishedPulling="2025-12-01 06:45:53.778166461 +0000 UTC m=+163.343179434" observedRunningTime="2025-12-01 06:45:54.199060405 +0000 UTC m=+163.764073378" watchObservedRunningTime="2025-12-01 06:45:54.212687304 +0000 UTC m=+163.777700278"
Dec 01 06:45:54 crc kubenswrapper[4632]: I1201 06:45:54.213156 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2cmb4" podStartSLOduration=2.374460533 podStartE2EDuration="23.213151061s" podCreationTimestamp="2025-12-01 06:45:31 +0000 UTC" firstStartedPulling="2025-12-01 06:45:32.905424557 +0000 UTC m=+142.470437531" lastFinishedPulling="2025-12-01 06:45:53.744115097 +0000 UTC m=+163.309128059" observedRunningTime="2025-12-01 06:45:54.211233249 +0000 UTC m=+163.776246222" watchObservedRunningTime="2025-12-01 06:45:54.213151061 +0000 UTC m=+163.778164034"
Dec 01 06:45:54 crc kubenswrapper[4632]: I1201 06:45:54.248325 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-h6xj5" podStartSLOduration=2.5718260429999997 podStartE2EDuration="23.248308484s" podCreationTimestamp="2025-12-01 06:45:31 +0000 UTC" firstStartedPulling="2025-12-01 06:45:32.909123392 +0000 UTC m=+142.474136364" lastFinishedPulling="2025-12-01 06:45:53.585605832 +0000 UTC m=+163.150618805" observedRunningTime="2025-12-01 06:45:54.22797999 +0000 UTC m=+163.792992963" watchObservedRunningTime="2025-12-01 06:45:54.248308484 +0000 UTC m=+163.813321457"
Dec 01 06:45:54 crc kubenswrapper[4632]: I1201 06:45:54.265603 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-lcqc9" podStartSLOduration=1.529818567 podStartE2EDuration="21.265584555s" podCreationTimestamp="2025-12-01 06:45:33 +0000 UTC" firstStartedPulling="2025-12-01 06:45:33.917698364 +0000 UTC m=+143.482711336" lastFinishedPulling="2025-12-01 06:45:53.653464351 +0000 UTC m=+163.218477324" observedRunningTime="2025-12-01 06:45:54.249171112 +0000 UTC m=+163.814184095" watchObservedRunningTime="2025-12-01 06:45:54.265584555 +0000 UTC m=+163.830597528"
Dec 01 06:45:54 crc kubenswrapper[4632]: I1201 06:45:54.284280 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5w5vx" podStartSLOduration=2.446398683 podStartE2EDuration="23.284268312s" podCreationTimestamp="2025-12-01 06:45:31 +0000 UTC" firstStartedPulling="2025-12-01 06:45:32.907880205 +0000 UTC m=+142.472893177" lastFinishedPulling="2025-12-01 06:45:53.745749833 +0000 UTC m=+163.310762806" observedRunningTime="2025-12-01 06:45:54.282127891 +0000 UTC m=+163.847140874" watchObservedRunningTime="2025-12-01 06:45:54.284268312 +0000 UTC m=+163.849281285"
Dec 01 06:45:54 crc kubenswrapper[4632]: I1201 06:45:54.284722 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-n24r7" podStartSLOduration=2.609005967 podStartE2EDuration="20.284718092s" podCreationTimestamp="2025-12-01 06:45:34 +0000 UTC" firstStartedPulling="2025-12-01 06:45:35.969268728 +0000 UTC m=+145.534281701" lastFinishedPulling="2025-12-01 06:45:53.644980853 +0000 UTC m=+163.209993826" observedRunningTime="2025-12-01 06:45:54.2646072 +0000 UTC m=+163.829620172" watchObservedRunningTime="2025-12-01 06:45:54.284718092 +0000 UTC m=+163.849731055"
Dec 01 06:45:54 crc kubenswrapper[4632]: I1201 06:45:54.574217 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-n24r7"
pod="openshift-marketplace/redhat-operators-n24r7" Dec 01 06:45:54 crc kubenswrapper[4632]: I1201 06:45:54.574267 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-n24r7" Dec 01 06:45:54 crc kubenswrapper[4632]: I1201 06:45:54.973800 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-t2hmz" Dec 01 06:45:54 crc kubenswrapper[4632]: I1201 06:45:54.973854 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-t2hmz" Dec 01 06:45:55 crc kubenswrapper[4632]: I1201 06:45:55.651589 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-n24r7" podUID="56a27481-d126-4d90-8082-75063f21c2ac" containerName="registry-server" probeResult="failure" output=< Dec 01 06:45:55 crc kubenswrapper[4632]: timeout: failed to connect service ":50051" within 1s Dec 01 06:45:55 crc kubenswrapper[4632]: > Dec 01 06:45:56 crc kubenswrapper[4632]: I1201 06:45:56.002691 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-t2hmz" podUID="dac7bd07-63ed-44e6-9c05-cec09b9690e9" containerName="registry-server" probeResult="failure" output=< Dec 01 06:45:56 crc kubenswrapper[4632]: timeout: failed to connect service ":50051" within 1s Dec 01 06:45:56 crc kubenswrapper[4632]: > Dec 01 06:46:01 crc kubenswrapper[4632]: I1201 06:46:01.374736 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-h6xj5" Dec 01 06:46:01 crc kubenswrapper[4632]: I1201 06:46:01.375565 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-h6xj5" Dec 01 06:46:01 crc kubenswrapper[4632]: I1201 06:46:01.411887 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-h6xj5" Dec 01 06:46:01 crc kubenswrapper[4632]: I1201 06:46:01.569694 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5w5vx" Dec 01 06:46:01 crc kubenswrapper[4632]: I1201 06:46:01.569922 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5w5vx" Dec 01 06:46:01 crc kubenswrapper[4632]: I1201 06:46:01.603223 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5w5vx" Dec 01 06:46:01 crc kubenswrapper[4632]: I1201 06:46:01.775872 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-dgtqs" Dec 01 06:46:01 crc kubenswrapper[4632]: I1201 06:46:01.776112 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-dgtqs" Dec 01 06:46:01 crc kubenswrapper[4632]: I1201 06:46:01.803466 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-dgtqs" Dec 01 06:46:02 crc kubenswrapper[4632]: I1201 06:46:02.054137 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2cmb4" Dec 01 06:46:02 crc kubenswrapper[4632]: I1201 06:46:02.054179 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2cmb4" Dec 01 06:46:02 crc 
kubenswrapper[4632]: I1201 06:46:02.080060 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2cmb4" Dec 01 06:46:02 crc kubenswrapper[4632]: I1201 06:46:02.215739 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-h6xj5" Dec 01 06:46:02 crc kubenswrapper[4632]: I1201 06:46:02.215948 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-dgtqs" Dec 01 06:46:02 crc kubenswrapper[4632]: I1201 06:46:02.216181 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5w5vx" Dec 01 06:46:02 crc kubenswrapper[4632]: I1201 06:46:02.216425 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2cmb4" Dec 01 06:46:03 crc kubenswrapper[4632]: I1201 06:46:03.430563 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2cmb4"] Dec 01 06:46:03 crc kubenswrapper[4632]: I1201 06:46:03.585490 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-lcqc9" Dec 01 06:46:03 crc kubenswrapper[4632]: I1201 06:46:03.585706 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-lcqc9" Dec 01 06:46:03 crc kubenswrapper[4632]: I1201 06:46:03.613471 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-lcqc9" Dec 01 06:46:03 crc kubenswrapper[4632]: I1201 06:46:03.969605 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qjftw" Dec 01 06:46:03 crc kubenswrapper[4632]: I1201 06:46:03.969846 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qjftw" Dec 01 06:46:03 crc kubenswrapper[4632]: I1201 06:46:03.998392 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qjftw" Dec 01 06:46:04 crc kubenswrapper[4632]: I1201 06:46:04.030578 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dgtqs"] Dec 01 06:46:04 crc kubenswrapper[4632]: I1201 06:46:04.197220 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2cmb4" podUID="5cba9c81-c125-4984-9e0f-0783b49b8bf6" containerName="registry-server" containerID="cri-o://6d690e0ee875075bdf7e9cdeedfb973df35b13547ca078804c716e75c23c93fc" gracePeriod=2 Dec 01 06:46:04 crc kubenswrapper[4632]: I1201 06:46:04.222914 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qjftw" Dec 01 06:46:04 crc kubenswrapper[4632]: I1201 06:46:04.224805 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-lcqc9" Dec 01 06:46:04 crc kubenswrapper[4632]: I1201 06:46:04.558285 4632 util.go:48] "No ready sandbox for pod can be found. 
Dec 01 06:46:04 crc kubenswrapper[4632]: I1201 06:46:04.604892 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-n24r7"
Dec 01 06:46:04 crc kubenswrapper[4632]: I1201 06:46:04.631912 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-n24r7"
Dec 01 06:46:04 crc kubenswrapper[4632]: I1201 06:46:04.676479 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cba9c81-c125-4984-9e0f-0783b49b8bf6-catalog-content\") pod \"5cba9c81-c125-4984-9e0f-0783b49b8bf6\" (UID: \"5cba9c81-c125-4984-9e0f-0783b49b8bf6\") "
Dec 01 06:46:04 crc kubenswrapper[4632]: I1201 06:46:04.676612 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cba9c81-c125-4984-9e0f-0783b49b8bf6-utilities\") pod \"5cba9c81-c125-4984-9e0f-0783b49b8bf6\" (UID: \"5cba9c81-c125-4984-9e0f-0783b49b8bf6\") "
Dec 01 06:46:04 crc kubenswrapper[4632]: I1201 06:46:04.676688 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fksgn\" (UniqueName: \"kubernetes.io/projected/5cba9c81-c125-4984-9e0f-0783b49b8bf6-kube-api-access-fksgn\") pod \"5cba9c81-c125-4984-9e0f-0783b49b8bf6\" (UID: \"5cba9c81-c125-4984-9e0f-0783b49b8bf6\") "
Dec 01 06:46:04 crc kubenswrapper[4632]: I1201 06:46:04.677398 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cba9c81-c125-4984-9e0f-0783b49b8bf6-utilities" (OuterVolumeSpecName: "utilities") pod "5cba9c81-c125-4984-9e0f-0783b49b8bf6" (UID: "5cba9c81-c125-4984-9e0f-0783b49b8bf6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 06:46:04 crc kubenswrapper[4632]: I1201 06:46:04.680745 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cba9c81-c125-4984-9e0f-0783b49b8bf6-kube-api-access-fksgn" (OuterVolumeSpecName: "kube-api-access-fksgn") pod "5cba9c81-c125-4984-9e0f-0783b49b8bf6" (UID: "5cba9c81-c125-4984-9e0f-0783b49b8bf6"). InnerVolumeSpecName "kube-api-access-fksgn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:46:04 crc kubenswrapper[4632]: I1201 06:46:04.714873 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5cba9c81-c125-4984-9e0f-0783b49b8bf6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5cba9c81-c125-4984-9e0f-0783b49b8bf6" (UID: "5cba9c81-c125-4984-9e0f-0783b49b8bf6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 06:46:04 crc kubenswrapper[4632]: I1201 06:46:04.778054 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fksgn\" (UniqueName: \"kubernetes.io/projected/5cba9c81-c125-4984-9e0f-0783b49b8bf6-kube-api-access-fksgn\") on node \"crc\" DevicePath \"\""
Dec 01 06:46:04 crc kubenswrapper[4632]: I1201 06:46:04.778085 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5cba9c81-c125-4984-9e0f-0783b49b8bf6-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 01 06:46:04 crc kubenswrapper[4632]: I1201 06:46:04.778095 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5cba9c81-c125-4984-9e0f-0783b49b8bf6-utilities\") on node \"crc\" DevicePath \"\""
Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.004684 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-t2hmz"
Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.029451 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-t2hmz"
Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.203474 4632 generic.go:334] "Generic (PLEG): container finished" podID="5cba9c81-c125-4984-9e0f-0783b49b8bf6" containerID="6d690e0ee875075bdf7e9cdeedfb973df35b13547ca078804c716e75c23c93fc" exitCode=0
Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.203557 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2cmb4" event={"ID":"5cba9c81-c125-4984-9e0f-0783b49b8bf6","Type":"ContainerDied","Data":"6d690e0ee875075bdf7e9cdeedfb973df35b13547ca078804c716e75c23c93fc"}
Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.203588 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2cmb4" event={"ID":"5cba9c81-c125-4984-9e0f-0783b49b8bf6","Type":"ContainerDied","Data":"5fbbd3f3e743e19248215db5bfb72b5c83931d67dfa7e420f0a99a984551ff54"}
Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.203601 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2cmb4"
Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.203626 4632 scope.go:117] "RemoveContainer" containerID="6d690e0ee875075bdf7e9cdeedfb973df35b13547ca078804c716e75c23c93fc"
Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.203671 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-dgtqs" podUID="39776e22-d730-44e7-b6a5-f7707cffb5af" containerName="registry-server" containerID="cri-o://6785e0cf52a2c5ce9f90a7151b9c53d1797001a06ebe8980efdb8ea4b5f9d428" gracePeriod=2
Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.216436 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2cmb4"]
Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.217021 4632 scope.go:117] "RemoveContainer" containerID="5ea7da982745655437aecb7db13e0da8aefdde13dfb3746ff701e8f6baa15bac"
Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.219790 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2cmb4"]
Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.233566 4632 scope.go:117] "RemoveContainer" containerID="935f93c5446f80243c5fdc4af4566b3d48b1b5f911da5bd99122719dcfa68573"
Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.294617 4632 scope.go:117] "RemoveContainer" containerID="6d690e0ee875075bdf7e9cdeedfb973df35b13547ca078804c716e75c23c93fc"
Dec 01 06:46:05 crc kubenswrapper[4632]: E1201 06:46:05.295029 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d690e0ee875075bdf7e9cdeedfb973df35b13547ca078804c716e75c23c93fc\": container with ID starting with 6d690e0ee875075bdf7e9cdeedfb973df35b13547ca078804c716e75c23c93fc not found: ID does not exist" containerID="6d690e0ee875075bdf7e9cdeedfb973df35b13547ca078804c716e75c23c93fc"
Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.295064 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d690e0ee875075bdf7e9cdeedfb973df35b13547ca078804c716e75c23c93fc"} err="failed to get container status \"6d690e0ee875075bdf7e9cdeedfb973df35b13547ca078804c716e75c23c93fc\": rpc error: code = NotFound desc = could not find container \"6d690e0ee875075bdf7e9cdeedfb973df35b13547ca078804c716e75c23c93fc\": container with ID starting with 6d690e0ee875075bdf7e9cdeedfb973df35b13547ca078804c716e75c23c93fc not found: ID does not exist"
Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.295104 4632 scope.go:117] "RemoveContainer" containerID="5ea7da982745655437aecb7db13e0da8aefdde13dfb3746ff701e8f6baa15bac"
Dec 01 06:46:05 crc kubenswrapper[4632]: E1201 06:46:05.295422 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ea7da982745655437aecb7db13e0da8aefdde13dfb3746ff701e8f6baa15bac\": container with ID starting with 5ea7da982745655437aecb7db13e0da8aefdde13dfb3746ff701e8f6baa15bac not found: ID does not exist" containerID="5ea7da982745655437aecb7db13e0da8aefdde13dfb3746ff701e8f6baa15bac"
Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.295453 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ea7da982745655437aecb7db13e0da8aefdde13dfb3746ff701e8f6baa15bac"} err="failed to get container status \"5ea7da982745655437aecb7db13e0da8aefdde13dfb3746ff701e8f6baa15bac\": rpc
error: code = NotFound desc = could not find container \"5ea7da982745655437aecb7db13e0da8aefdde13dfb3746ff701e8f6baa15bac\": container with ID starting with 5ea7da982745655437aecb7db13e0da8aefdde13dfb3746ff701e8f6baa15bac not found: ID does not exist" Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.295479 4632 scope.go:117] "RemoveContainer" containerID="935f93c5446f80243c5fdc4af4566b3d48b1b5f911da5bd99122719dcfa68573" Dec 01 06:46:05 crc kubenswrapper[4632]: E1201 06:46:05.295752 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"935f93c5446f80243c5fdc4af4566b3d48b1b5f911da5bd99122719dcfa68573\": container with ID starting with 935f93c5446f80243c5fdc4af4566b3d48b1b5f911da5bd99122719dcfa68573 not found: ID does not exist" containerID="935f93c5446f80243c5fdc4af4566b3d48b1b5f911da5bd99122719dcfa68573" Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.295779 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"935f93c5446f80243c5fdc4af4566b3d48b1b5f911da5bd99122719dcfa68573"} err="failed to get container status \"935f93c5446f80243c5fdc4af4566b3d48b1b5f911da5bd99122719dcfa68573\": rpc error: code = NotFound desc = could not find container \"935f93c5446f80243c5fdc4af4566b3d48b1b5f911da5bd99122719dcfa68573\": container with ID starting with 935f93c5446f80243c5fdc4af4566b3d48b1b5f911da5bd99122719dcfa68573 not found: ID does not exist" Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.563373 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dgtqs" Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.688578 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmqpk\" (UniqueName: \"kubernetes.io/projected/39776e22-d730-44e7-b6a5-f7707cffb5af-kube-api-access-dmqpk\") pod \"39776e22-d730-44e7-b6a5-f7707cffb5af\" (UID: \"39776e22-d730-44e7-b6a5-f7707cffb5af\") " Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.688616 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39776e22-d730-44e7-b6a5-f7707cffb5af-catalog-content\") pod \"39776e22-d730-44e7-b6a5-f7707cffb5af\" (UID: \"39776e22-d730-44e7-b6a5-f7707cffb5af\") " Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.688668 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39776e22-d730-44e7-b6a5-f7707cffb5af-utilities\") pod \"39776e22-d730-44e7-b6a5-f7707cffb5af\" (UID: \"39776e22-d730-44e7-b6a5-f7707cffb5af\") " Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.689291 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39776e22-d730-44e7-b6a5-f7707cffb5af-utilities" (OuterVolumeSpecName: "utilities") pod "39776e22-d730-44e7-b6a5-f7707cffb5af" (UID: "39776e22-d730-44e7-b6a5-f7707cffb5af"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.692524 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39776e22-d730-44e7-b6a5-f7707cffb5af-kube-api-access-dmqpk" (OuterVolumeSpecName: "kube-api-access-dmqpk") pod "39776e22-d730-44e7-b6a5-f7707cffb5af" (UID: "39776e22-d730-44e7-b6a5-f7707cffb5af"). 
InnerVolumeSpecName "kube-api-access-dmqpk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.723861 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39776e22-d730-44e7-b6a5-f7707cffb5af-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "39776e22-d730-44e7-b6a5-f7707cffb5af" (UID: "39776e22-d730-44e7-b6a5-f7707cffb5af"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.790002 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmqpk\" (UniqueName: \"kubernetes.io/projected/39776e22-d730-44e7-b6a5-f7707cffb5af-kube-api-access-dmqpk\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.790026 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39776e22-d730-44e7-b6a5-f7707cffb5af-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:05 crc kubenswrapper[4632]: I1201 06:46:05.790036 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39776e22-d730-44e7-b6a5-f7707cffb5af-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:06 crc kubenswrapper[4632]: I1201 06:46:06.213191 4632 generic.go:334] "Generic (PLEG): container finished" podID="39776e22-d730-44e7-b6a5-f7707cffb5af" containerID="6785e0cf52a2c5ce9f90a7151b9c53d1797001a06ebe8980efdb8ea4b5f9d428" exitCode=0 Dec 01 06:46:06 crc kubenswrapper[4632]: I1201 06:46:06.213284 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dgtqs" Dec 01 06:46:06 crc kubenswrapper[4632]: I1201 06:46:06.213458 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dgtqs" event={"ID":"39776e22-d730-44e7-b6a5-f7707cffb5af","Type":"ContainerDied","Data":"6785e0cf52a2c5ce9f90a7151b9c53d1797001a06ebe8980efdb8ea4b5f9d428"} Dec 01 06:46:06 crc kubenswrapper[4632]: I1201 06:46:06.213559 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dgtqs" event={"ID":"39776e22-d730-44e7-b6a5-f7707cffb5af","Type":"ContainerDied","Data":"e547e3485d53b743dbc0979eda47ae44461b4083ccd1b7ef161103d6b8df98d6"} Dec 01 06:46:06 crc kubenswrapper[4632]: I1201 06:46:06.213628 4632 scope.go:117] "RemoveContainer" containerID="6785e0cf52a2c5ce9f90a7151b9c53d1797001a06ebe8980efdb8ea4b5f9d428" Dec 01 06:46:06 crc kubenswrapper[4632]: I1201 06:46:06.227268 4632 scope.go:117] "RemoveContainer" containerID="ad57f7436bf8cc3afa784a7028da0db5f1431c8031c6c13ed24a36f6d4cb56ea" Dec 01 06:46:06 crc kubenswrapper[4632]: I1201 06:46:06.234315 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dgtqs"] Dec 01 06:46:06 crc kubenswrapper[4632]: I1201 06:46:06.236812 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-dgtqs"] Dec 01 06:46:06 crc kubenswrapper[4632]: I1201 06:46:06.241125 4632 scope.go:117] "RemoveContainer" containerID="1462cec6eb1043f6cc54a97b983c38da749fa3e26965c70414c27ffc423d82f2" Dec 01 06:46:06 crc kubenswrapper[4632]: I1201 06:46:06.250408 4632 scope.go:117] "RemoveContainer" containerID="6785e0cf52a2c5ce9f90a7151b9c53d1797001a06ebe8980efdb8ea4b5f9d428" Dec 01 06:46:06 crc 
kubenswrapper[4632]: E1201 06:46:06.250695 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6785e0cf52a2c5ce9f90a7151b9c53d1797001a06ebe8980efdb8ea4b5f9d428\": container with ID starting with 6785e0cf52a2c5ce9f90a7151b9c53d1797001a06ebe8980efdb8ea4b5f9d428 not found: ID does not exist" containerID="6785e0cf52a2c5ce9f90a7151b9c53d1797001a06ebe8980efdb8ea4b5f9d428" Dec 01 06:46:06 crc kubenswrapper[4632]: I1201 06:46:06.250736 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6785e0cf52a2c5ce9f90a7151b9c53d1797001a06ebe8980efdb8ea4b5f9d428"} err="failed to get container status \"6785e0cf52a2c5ce9f90a7151b9c53d1797001a06ebe8980efdb8ea4b5f9d428\": rpc error: code = NotFound desc = could not find container \"6785e0cf52a2c5ce9f90a7151b9c53d1797001a06ebe8980efdb8ea4b5f9d428\": container with ID starting with 6785e0cf52a2c5ce9f90a7151b9c53d1797001a06ebe8980efdb8ea4b5f9d428 not found: ID does not exist" Dec 01 06:46:06 crc kubenswrapper[4632]: I1201 06:46:06.250764 4632 scope.go:117] "RemoveContainer" containerID="ad57f7436bf8cc3afa784a7028da0db5f1431c8031c6c13ed24a36f6d4cb56ea" Dec 01 06:46:06 crc kubenswrapper[4632]: E1201 06:46:06.251116 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad57f7436bf8cc3afa784a7028da0db5f1431c8031c6c13ed24a36f6d4cb56ea\": container with ID starting with ad57f7436bf8cc3afa784a7028da0db5f1431c8031c6c13ed24a36f6d4cb56ea not found: ID does not exist" containerID="ad57f7436bf8cc3afa784a7028da0db5f1431c8031c6c13ed24a36f6d4cb56ea" Dec 01 06:46:06 crc kubenswrapper[4632]: I1201 06:46:06.251141 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad57f7436bf8cc3afa784a7028da0db5f1431c8031c6c13ed24a36f6d4cb56ea"} err="failed to get container status \"ad57f7436bf8cc3afa784a7028da0db5f1431c8031c6c13ed24a36f6d4cb56ea\": rpc error: code = NotFound desc = could not find container \"ad57f7436bf8cc3afa784a7028da0db5f1431c8031c6c13ed24a36f6d4cb56ea\": container with ID starting with ad57f7436bf8cc3afa784a7028da0db5f1431c8031c6c13ed24a36f6d4cb56ea not found: ID does not exist" Dec 01 06:46:06 crc kubenswrapper[4632]: I1201 06:46:06.251161 4632 scope.go:117] "RemoveContainer" containerID="1462cec6eb1043f6cc54a97b983c38da749fa3e26965c70414c27ffc423d82f2" Dec 01 06:46:06 crc kubenswrapper[4632]: E1201 06:46:06.251453 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1462cec6eb1043f6cc54a97b983c38da749fa3e26965c70414c27ffc423d82f2\": container with ID starting with 1462cec6eb1043f6cc54a97b983c38da749fa3e26965c70414c27ffc423d82f2 not found: ID does not exist" containerID="1462cec6eb1043f6cc54a97b983c38da749fa3e26965c70414c27ffc423d82f2" Dec 01 06:46:06 crc kubenswrapper[4632]: I1201 06:46:06.251479 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1462cec6eb1043f6cc54a97b983c38da749fa3e26965c70414c27ffc423d82f2"} err="failed to get container status \"1462cec6eb1043f6cc54a97b983c38da749fa3e26965c70414c27ffc423d82f2\": rpc error: code = NotFound desc = could not find container \"1462cec6eb1043f6cc54a97b983c38da749fa3e26965c70414c27ffc423d82f2\": container with ID starting with 1462cec6eb1043f6cc54a97b983c38da749fa3e26965c70414c27ffc423d82f2 not found: ID does not exist" Dec 01 06:46:06 crc kubenswrapper[4632]: 
I1201 06:46:06.433311 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qjftw"] Dec 01 06:46:06 crc kubenswrapper[4632]: I1201 06:46:06.433689 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qjftw" podUID="ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb" containerName="registry-server" containerID="cri-o://dee328ae391c80b490e0f36faae3ad994211b0e4447a3adb652f43e38d41e308" gracePeriod=2 Dec 01 06:46:06 crc kubenswrapper[4632]: I1201 06:46:06.622280 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vtnjq" Dec 01 06:46:06 crc kubenswrapper[4632]: I1201 06:46:06.755336 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39776e22-d730-44e7-b6a5-f7707cffb5af" path="/var/lib/kubelet/pods/39776e22-d730-44e7-b6a5-f7707cffb5af/volumes" Dec 01 06:46:06 crc kubenswrapper[4632]: I1201 06:46:06.755888 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cba9c81-c125-4984-9e0f-0783b49b8bf6" path="/var/lib/kubelet/pods/5cba9c81-c125-4984-9e0f-0783b49b8bf6/volumes" Dec 01 06:46:06 crc kubenswrapper[4632]: I1201 06:46:06.770581 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qjftw" Dec 01 06:46:06 crc kubenswrapper[4632]: I1201 06:46:06.908484 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb-utilities\") pod \"ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb\" (UID: \"ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb\") " Dec 01 06:46:06 crc kubenswrapper[4632]: I1201 06:46:06.908541 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb-catalog-content\") pod \"ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb\" (UID: \"ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb\") " Dec 01 06:46:06 crc kubenswrapper[4632]: I1201 06:46:06.908565 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gzs5f\" (UniqueName: \"kubernetes.io/projected/ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb-kube-api-access-gzs5f\") pod \"ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb\" (UID: \"ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb\") " Dec 01 06:46:06 crc kubenswrapper[4632]: I1201 06:46:06.909035 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb-utilities" (OuterVolumeSpecName: "utilities") pod "ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb" (UID: "ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:46:06 crc kubenswrapper[4632]: I1201 06:46:06.911532 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb-kube-api-access-gzs5f" (OuterVolumeSpecName: "kube-api-access-gzs5f") pod "ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb" (UID: "ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb"). InnerVolumeSpecName "kube-api-access-gzs5f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:46:06 crc kubenswrapper[4632]: I1201 06:46:06.922281 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb" (UID: "ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:46:07 crc kubenswrapper[4632]: I1201 06:46:07.009815 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:07 crc kubenswrapper[4632]: I1201 06:46:07.009839 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:07 crc kubenswrapper[4632]: I1201 06:46:07.009851 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gzs5f\" (UniqueName: \"kubernetes.io/projected/ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb-kube-api-access-gzs5f\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:07 crc kubenswrapper[4632]: I1201 06:46:07.223133 4632 generic.go:334] "Generic (PLEG): container finished" podID="ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb" containerID="dee328ae391c80b490e0f36faae3ad994211b0e4447a3adb652f43e38d41e308" exitCode=0 Dec 01 06:46:07 crc kubenswrapper[4632]: I1201 06:46:07.223187 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qjftw" event={"ID":"ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb","Type":"ContainerDied","Data":"dee328ae391c80b490e0f36faae3ad994211b0e4447a3adb652f43e38d41e308"} Dec 01 06:46:07 crc kubenswrapper[4632]: I1201 06:46:07.223198 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qjftw" Dec 01 06:46:07 crc kubenswrapper[4632]: I1201 06:46:07.223227 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qjftw" event={"ID":"ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb","Type":"ContainerDied","Data":"89f94bb35a22498cb8d17c47eaf00937d10a4164841be85b6a8ff0df89de7f3f"} Dec 01 06:46:07 crc kubenswrapper[4632]: I1201 06:46:07.223249 4632 scope.go:117] "RemoveContainer" containerID="dee328ae391c80b490e0f36faae3ad994211b0e4447a3adb652f43e38d41e308" Dec 01 06:46:07 crc kubenswrapper[4632]: I1201 06:46:07.237346 4632 scope.go:117] "RemoveContainer" containerID="33633f14a33f2a3907762373bab91b8044098b37c756f9579f4eeb7d293dc6ad" Dec 01 06:46:07 crc kubenswrapper[4632]: I1201 06:46:07.247538 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qjftw"] Dec 01 06:46:07 crc kubenswrapper[4632]: I1201 06:46:07.248070 4632 scope.go:117] "RemoveContainer" containerID="33153bd5a5164f671d8374b73cd6d70e2aa0daf2000a89947493ff31c0a48ad5" Dec 01 06:46:07 crc kubenswrapper[4632]: I1201 06:46:07.248672 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qjftw"] Dec 01 06:46:07 crc kubenswrapper[4632]: I1201 06:46:07.271513 4632 scope.go:117] "RemoveContainer" containerID="dee328ae391c80b490e0f36faae3ad994211b0e4447a3adb652f43e38d41e308" Dec 01 06:46:07 crc kubenswrapper[4632]: E1201 06:46:07.272021 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dee328ae391c80b490e0f36faae3ad994211b0e4447a3adb652f43e38d41e308\": container with ID starting with dee328ae391c80b490e0f36faae3ad994211b0e4447a3adb652f43e38d41e308 not found: ID does not exist" containerID="dee328ae391c80b490e0f36faae3ad994211b0e4447a3adb652f43e38d41e308" Dec 01 06:46:07 crc kubenswrapper[4632]: I1201 06:46:07.272055 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dee328ae391c80b490e0f36faae3ad994211b0e4447a3adb652f43e38d41e308"} err="failed to get container status \"dee328ae391c80b490e0f36faae3ad994211b0e4447a3adb652f43e38d41e308\": rpc error: code = NotFound desc = could not find container \"dee328ae391c80b490e0f36faae3ad994211b0e4447a3adb652f43e38d41e308\": container with ID starting with dee328ae391c80b490e0f36faae3ad994211b0e4447a3adb652f43e38d41e308 not found: ID does not exist" Dec 01 06:46:07 crc kubenswrapper[4632]: I1201 06:46:07.272076 4632 scope.go:117] "RemoveContainer" containerID="33633f14a33f2a3907762373bab91b8044098b37c756f9579f4eeb7d293dc6ad" Dec 01 06:46:07 crc kubenswrapper[4632]: E1201 06:46:07.272393 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33633f14a33f2a3907762373bab91b8044098b37c756f9579f4eeb7d293dc6ad\": container with ID starting with 33633f14a33f2a3907762373bab91b8044098b37c756f9579f4eeb7d293dc6ad not found: ID does not exist" containerID="33633f14a33f2a3907762373bab91b8044098b37c756f9579f4eeb7d293dc6ad" Dec 01 06:46:07 crc kubenswrapper[4632]: I1201 06:46:07.272427 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33633f14a33f2a3907762373bab91b8044098b37c756f9579f4eeb7d293dc6ad"} err="failed to get container status \"33633f14a33f2a3907762373bab91b8044098b37c756f9579f4eeb7d293dc6ad\": rpc error: code = NotFound desc = could not find 
container \"33633f14a33f2a3907762373bab91b8044098b37c756f9579f4eeb7d293dc6ad\": container with ID starting with 33633f14a33f2a3907762373bab91b8044098b37c756f9579f4eeb7d293dc6ad not found: ID does not exist" Dec 01 06:46:07 crc kubenswrapper[4632]: I1201 06:46:07.272453 4632 scope.go:117] "RemoveContainer" containerID="33153bd5a5164f671d8374b73cd6d70e2aa0daf2000a89947493ff31c0a48ad5" Dec 01 06:46:07 crc kubenswrapper[4632]: E1201 06:46:07.272773 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33153bd5a5164f671d8374b73cd6d70e2aa0daf2000a89947493ff31c0a48ad5\": container with ID starting with 33153bd5a5164f671d8374b73cd6d70e2aa0daf2000a89947493ff31c0a48ad5 not found: ID does not exist" containerID="33153bd5a5164f671d8374b73cd6d70e2aa0daf2000a89947493ff31c0a48ad5" Dec 01 06:46:07 crc kubenswrapper[4632]: I1201 06:46:07.272798 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33153bd5a5164f671d8374b73cd6d70e2aa0daf2000a89947493ff31c0a48ad5"} err="failed to get container status \"33153bd5a5164f671d8374b73cd6d70e2aa0daf2000a89947493ff31c0a48ad5\": rpc error: code = NotFound desc = could not find container \"33153bd5a5164f671d8374b73cd6d70e2aa0daf2000a89947493ff31c0a48ad5\": container with ID starting with 33153bd5a5164f671d8374b73cd6d70e2aa0daf2000a89947493ff31c0a48ad5 not found: ID does not exist" Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.203282 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-d8bfn"] Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.203493 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn" podUID="d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa" containerName="controller-manager" containerID="cri-o://cc4cbca5efb83b78c3cfacff5ce1b3098e59fa0b087c93a4f39512bb7dc3c10c" gracePeriod=30 Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.301879 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr"] Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.302072 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr" podUID="6be1d7b9-4a80-4b57-a4ac-71cf05c921f9" containerName="route-controller-manager" containerID="cri-o://66f14f53d731878626c93c616badfddd1afa0b8d2757d6afc5d58f14f054e56b" gracePeriod=30 Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.630933 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr" Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.664555 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn" Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.725640 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-config\") pod \"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa\" (UID: \"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa\") " Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.725899 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-client-ca\") pod \"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa\" (UID: \"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa\") " Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.726021 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6be1d7b9-4a80-4b57-a4ac-71cf05c921f9-serving-cert\") pod \"6be1d7b9-4a80-4b57-a4ac-71cf05c921f9\" (UID: \"6be1d7b9-4a80-4b57-a4ac-71cf05c921f9\") " Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.726141 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6be1d7b9-4a80-4b57-a4ac-71cf05c921f9-client-ca\") pod \"6be1d7b9-4a80-4b57-a4ac-71cf05c921f9\" (UID: \"6be1d7b9-4a80-4b57-a4ac-71cf05c921f9\") " Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.726226 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-serving-cert\") pod \"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa\" (UID: \"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa\") " Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.726341 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6be1d7b9-4a80-4b57-a4ac-71cf05c921f9-config\") pod \"6be1d7b9-4a80-4b57-a4ac-71cf05c921f9\" (UID: \"6be1d7b9-4a80-4b57-a4ac-71cf05c921f9\") " Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.726461 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7qwhw\" (UniqueName: \"kubernetes.io/projected/6be1d7b9-4a80-4b57-a4ac-71cf05c921f9-kube-api-access-7qwhw\") pod \"6be1d7b9-4a80-4b57-a4ac-71cf05c921f9\" (UID: \"6be1d7b9-4a80-4b57-a4ac-71cf05c921f9\") " Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.726560 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-proxy-ca-bundles\") pod \"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa\" (UID: \"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa\") " Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.726650 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvqlf\" (UniqueName: \"kubernetes.io/projected/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-kube-api-access-cvqlf\") pod \"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa\" (UID: \"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa\") " Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.726335 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-client-ca" (OuterVolumeSpecName: "client-ca") pod "d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa" (UID: 
"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.726435 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-config" (OuterVolumeSpecName: "config") pod "d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa" (UID: "d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.726623 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6be1d7b9-4a80-4b57-a4ac-71cf05c921f9-client-ca" (OuterVolumeSpecName: "client-ca") pod "6be1d7b9-4a80-4b57-a4ac-71cf05c921f9" (UID: "6be1d7b9-4a80-4b57-a4ac-71cf05c921f9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.726795 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6be1d7b9-4a80-4b57-a4ac-71cf05c921f9-config" (OuterVolumeSpecName: "config") pod "6be1d7b9-4a80-4b57-a4ac-71cf05c921f9" (UID: "6be1d7b9-4a80-4b57-a4ac-71cf05c921f9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.727162 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa" (UID: "d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.729965 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa" (UID: "d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.729962 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6be1d7b9-4a80-4b57-a4ac-71cf05c921f9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6be1d7b9-4a80-4b57-a4ac-71cf05c921f9" (UID: "6be1d7b9-4a80-4b57-a4ac-71cf05c921f9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.730072 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6be1d7b9-4a80-4b57-a4ac-71cf05c921f9-kube-api-access-7qwhw" (OuterVolumeSpecName: "kube-api-access-7qwhw") pod "6be1d7b9-4a80-4b57-a4ac-71cf05c921f9" (UID: "6be1d7b9-4a80-4b57-a4ac-71cf05c921f9"). InnerVolumeSpecName "kube-api-access-7qwhw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.731671 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-kube-api-access-cvqlf" (OuterVolumeSpecName: "kube-api-access-cvqlf") pod "d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa" (UID: "d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa"). 
InnerVolumeSpecName "kube-api-access-cvqlf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.755335 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb" path="/var/lib/kubelet/pods/ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb/volumes" Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.833104 4632 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.833218 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6be1d7b9-4a80-4b57-a4ac-71cf05c921f9-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.833228 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7qwhw\" (UniqueName: \"kubernetes.io/projected/6be1d7b9-4a80-4b57-a4ac-71cf05c921f9-kube-api-access-7qwhw\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.833339 4632 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.833367 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvqlf\" (UniqueName: \"kubernetes.io/projected/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-kube-api-access-cvqlf\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.833376 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.833388 4632 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.833396 4632 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6be1d7b9-4a80-4b57-a4ac-71cf05c921f9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.833520 4632 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6be1d7b9-4a80-4b57-a4ac-71cf05c921f9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.834954 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-t2hmz"] Dec 01 06:46:08 crc kubenswrapper[4632]: I1201 06:46:08.835252 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-t2hmz" podUID="dac7bd07-63ed-44e6-9c05-cec09b9690e9" containerName="registry-server" containerID="cri-o://bd4bda7402f4caca3bb1fb41a416fc23fbc0cf417a266016e339a1ed0f67a5a7" gracePeriod=2 Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.086927 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-t2hmz" Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.234705 4632 generic.go:334] "Generic (PLEG): container finished" podID="d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa" containerID="cc4cbca5efb83b78c3cfacff5ce1b3098e59fa0b087c93a4f39512bb7dc3c10c" exitCode=0 Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.234779 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn" event={"ID":"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa","Type":"ContainerDied","Data":"cc4cbca5efb83b78c3cfacff5ce1b3098e59fa0b087c93a4f39512bb7dc3c10c"} Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.234841 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn" Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.234861 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-d8bfn" event={"ID":"d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa","Type":"ContainerDied","Data":"c422236c2d457b76b27b0a898160ff00c57b76ccc1d9e76f5f3ed5e07bebfae1"} Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.234895 4632 scope.go:117] "RemoveContainer" containerID="cc4cbca5efb83b78c3cfacff5ce1b3098e59fa0b087c93a4f39512bb7dc3c10c" Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.236757 4632 generic.go:334] "Generic (PLEG): container finished" podID="6be1d7b9-4a80-4b57-a4ac-71cf05c921f9" containerID="66f14f53d731878626c93c616badfddd1afa0b8d2757d6afc5d58f14f054e56b" exitCode=0 Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.236839 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr" event={"ID":"6be1d7b9-4a80-4b57-a4ac-71cf05c921f9","Type":"ContainerDied","Data":"66f14f53d731878626c93c616badfddd1afa0b8d2757d6afc5d58f14f054e56b"} Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.236871 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr" event={"ID":"6be1d7b9-4a80-4b57-a4ac-71cf05c921f9","Type":"ContainerDied","Data":"2f0d7078458fca8dc2c85a5f8f902282ca814c99d73423455db4ece087118dbb"} Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.236876 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr" Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.237220 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dac7bd07-63ed-44e6-9c05-cec09b9690e9-utilities\") pod \"dac7bd07-63ed-44e6-9c05-cec09b9690e9\" (UID: \"dac7bd07-63ed-44e6-9c05-cec09b9690e9\") " Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.237276 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qtzkz\" (UniqueName: \"kubernetes.io/projected/dac7bd07-63ed-44e6-9c05-cec09b9690e9-kube-api-access-qtzkz\") pod \"dac7bd07-63ed-44e6-9c05-cec09b9690e9\" (UID: \"dac7bd07-63ed-44e6-9c05-cec09b9690e9\") " Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.237374 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dac7bd07-63ed-44e6-9c05-cec09b9690e9-catalog-content\") pod \"dac7bd07-63ed-44e6-9c05-cec09b9690e9\" (UID: \"dac7bd07-63ed-44e6-9c05-cec09b9690e9\") " Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.237788 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dac7bd07-63ed-44e6-9c05-cec09b9690e9-utilities" (OuterVolumeSpecName: "utilities") pod "dac7bd07-63ed-44e6-9c05-cec09b9690e9" (UID: "dac7bd07-63ed-44e6-9c05-cec09b9690e9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.239697 4632 generic.go:334] "Generic (PLEG): container finished" podID="dac7bd07-63ed-44e6-9c05-cec09b9690e9" containerID="bd4bda7402f4caca3bb1fb41a416fc23fbc0cf417a266016e339a1ed0f67a5a7" exitCode=0 Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.239724 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t2hmz" event={"ID":"dac7bd07-63ed-44e6-9c05-cec09b9690e9","Type":"ContainerDied","Data":"bd4bda7402f4caca3bb1fb41a416fc23fbc0cf417a266016e339a1ed0f67a5a7"} Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.239906 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t2hmz" event={"ID":"dac7bd07-63ed-44e6-9c05-cec09b9690e9","Type":"ContainerDied","Data":"7fc035384b2076599d9648431ef04715184ac1691d338b6377335aadad0bc059"} Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.239836 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dac7bd07-63ed-44e6-9c05-cec09b9690e9-kube-api-access-qtzkz" (OuterVolumeSpecName: "kube-api-access-qtzkz") pod "dac7bd07-63ed-44e6-9c05-cec09b9690e9" (UID: "dac7bd07-63ed-44e6-9c05-cec09b9690e9"). InnerVolumeSpecName "kube-api-access-qtzkz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.239754 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-t2hmz" Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.249034 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-d8bfn"] Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.250442 4632 scope.go:117] "RemoveContainer" containerID="cc4cbca5efb83b78c3cfacff5ce1b3098e59fa0b087c93a4f39512bb7dc3c10c" Dec 01 06:46:09 crc kubenswrapper[4632]: E1201 06:46:09.250774 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc4cbca5efb83b78c3cfacff5ce1b3098e59fa0b087c93a4f39512bb7dc3c10c\": container with ID starting with cc4cbca5efb83b78c3cfacff5ce1b3098e59fa0b087c93a4f39512bb7dc3c10c not found: ID does not exist" containerID="cc4cbca5efb83b78c3cfacff5ce1b3098e59fa0b087c93a4f39512bb7dc3c10c" Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.250810 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc4cbca5efb83b78c3cfacff5ce1b3098e59fa0b087c93a4f39512bb7dc3c10c"} err="failed to get container status \"cc4cbca5efb83b78c3cfacff5ce1b3098e59fa0b087c93a4f39512bb7dc3c10c\": rpc error: code = NotFound desc = could not find container \"cc4cbca5efb83b78c3cfacff5ce1b3098e59fa0b087c93a4f39512bb7dc3c10c\": container with ID starting with cc4cbca5efb83b78c3cfacff5ce1b3098e59fa0b087c93a4f39512bb7dc3c10c not found: ID does not exist" Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.250836 4632 scope.go:117] "RemoveContainer" containerID="66f14f53d731878626c93c616badfddd1afa0b8d2757d6afc5d58f14f054e56b" Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.251233 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-d8bfn"] Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.262407 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr"] Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.262423 4632 scope.go:117] "RemoveContainer" containerID="66f14f53d731878626c93c616badfddd1afa0b8d2757d6afc5d58f14f054e56b" Dec 01 06:46:09 crc kubenswrapper[4632]: E1201 06:46:09.262707 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66f14f53d731878626c93c616badfddd1afa0b8d2757d6afc5d58f14f054e56b\": container with ID starting with 66f14f53d731878626c93c616badfddd1afa0b8d2757d6afc5d58f14f054e56b not found: ID does not exist" containerID="66f14f53d731878626c93c616badfddd1afa0b8d2757d6afc5d58f14f054e56b" Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.262741 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66f14f53d731878626c93c616badfddd1afa0b8d2757d6afc5d58f14f054e56b"} err="failed to get container status \"66f14f53d731878626c93c616badfddd1afa0b8d2757d6afc5d58f14f054e56b\": rpc error: code = NotFound desc = could not find container \"66f14f53d731878626c93c616badfddd1afa0b8d2757d6afc5d58f14f054e56b\": container with ID starting with 66f14f53d731878626c93c616badfddd1afa0b8d2757d6afc5d58f14f054e56b not found: ID does not exist" Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.262762 4632 scope.go:117] "RemoveContainer" containerID="bd4bda7402f4caca3bb1fb41a416fc23fbc0cf417a266016e339a1ed0f67a5a7" Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.265430 
4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-dxpwr"] Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.273283 4632 scope.go:117] "RemoveContainer" containerID="0cdea75a82bd770782d4f0152a270a38783d17db0ac4e740d2997c741e71dc3a" Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.284974 4632 scope.go:117] "RemoveContainer" containerID="fa0ed86ac29b488109b9d7c1c9871efccf963b76da1eca112108348c3d0826e4" Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.294142 4632 scope.go:117] "RemoveContainer" containerID="bd4bda7402f4caca3bb1fb41a416fc23fbc0cf417a266016e339a1ed0f67a5a7" Dec 01 06:46:09 crc kubenswrapper[4632]: E1201 06:46:09.294430 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd4bda7402f4caca3bb1fb41a416fc23fbc0cf417a266016e339a1ed0f67a5a7\": container with ID starting with bd4bda7402f4caca3bb1fb41a416fc23fbc0cf417a266016e339a1ed0f67a5a7 not found: ID does not exist" containerID="bd4bda7402f4caca3bb1fb41a416fc23fbc0cf417a266016e339a1ed0f67a5a7" Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.294457 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd4bda7402f4caca3bb1fb41a416fc23fbc0cf417a266016e339a1ed0f67a5a7"} err="failed to get container status \"bd4bda7402f4caca3bb1fb41a416fc23fbc0cf417a266016e339a1ed0f67a5a7\": rpc error: code = NotFound desc = could not find container \"bd4bda7402f4caca3bb1fb41a416fc23fbc0cf417a266016e339a1ed0f67a5a7\": container with ID starting with bd4bda7402f4caca3bb1fb41a416fc23fbc0cf417a266016e339a1ed0f67a5a7 not found: ID does not exist" Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.294475 4632 scope.go:117] "RemoveContainer" containerID="0cdea75a82bd770782d4f0152a270a38783d17db0ac4e740d2997c741e71dc3a" Dec 01 06:46:09 crc kubenswrapper[4632]: E1201 06:46:09.294675 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0cdea75a82bd770782d4f0152a270a38783d17db0ac4e740d2997c741e71dc3a\": container with ID starting with 0cdea75a82bd770782d4f0152a270a38783d17db0ac4e740d2997c741e71dc3a not found: ID does not exist" containerID="0cdea75a82bd770782d4f0152a270a38783d17db0ac4e740d2997c741e71dc3a" Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.294708 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cdea75a82bd770782d4f0152a270a38783d17db0ac4e740d2997c741e71dc3a"} err="failed to get container status \"0cdea75a82bd770782d4f0152a270a38783d17db0ac4e740d2997c741e71dc3a\": rpc error: code = NotFound desc = could not find container \"0cdea75a82bd770782d4f0152a270a38783d17db0ac4e740d2997c741e71dc3a\": container with ID starting with 0cdea75a82bd770782d4f0152a270a38783d17db0ac4e740d2997c741e71dc3a not found: ID does not exist" Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.294723 4632 scope.go:117] "RemoveContainer" containerID="fa0ed86ac29b488109b9d7c1c9871efccf963b76da1eca112108348c3d0826e4" Dec 01 06:46:09 crc kubenswrapper[4632]: E1201 06:46:09.294973 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa0ed86ac29b488109b9d7c1c9871efccf963b76da1eca112108348c3d0826e4\": container with ID starting with fa0ed86ac29b488109b9d7c1c9871efccf963b76da1eca112108348c3d0826e4 not found: ID does not exist" 
containerID="fa0ed86ac29b488109b9d7c1c9871efccf963b76da1eca112108348c3d0826e4" Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.294994 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa0ed86ac29b488109b9d7c1c9871efccf963b76da1eca112108348c3d0826e4"} err="failed to get container status \"fa0ed86ac29b488109b9d7c1c9871efccf963b76da1eca112108348c3d0826e4\": rpc error: code = NotFound desc = could not find container \"fa0ed86ac29b488109b9d7c1c9871efccf963b76da1eca112108348c3d0826e4\": container with ID starting with fa0ed86ac29b488109b9d7c1c9871efccf963b76da1eca112108348c3d0826e4 not found: ID does not exist" Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.334226 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dac7bd07-63ed-44e6-9c05-cec09b9690e9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dac7bd07-63ed-44e6-9c05-cec09b9690e9" (UID: "dac7bd07-63ed-44e6-9c05-cec09b9690e9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.338600 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dac7bd07-63ed-44e6-9c05-cec09b9690e9-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.338627 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qtzkz\" (UniqueName: \"kubernetes.io/projected/dac7bd07-63ed-44e6-9c05-cec09b9690e9-kube-api-access-qtzkz\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.338638 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dac7bd07-63ed-44e6-9c05-cec09b9690e9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.562783 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-t2hmz"] Dec 01 06:46:09 crc kubenswrapper[4632]: I1201 06:46:09.565119 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-t2hmz"] Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.017511 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-54764d6f49-2kbxl"] Dec 01 06:46:10 crc kubenswrapper[4632]: E1201 06:46:10.017686 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dac7bd07-63ed-44e6-9c05-cec09b9690e9" containerName="extract-utilities" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.017697 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="dac7bd07-63ed-44e6-9c05-cec09b9690e9" containerName="extract-utilities" Dec 01 06:46:10 crc kubenswrapper[4632]: E1201 06:46:10.017705 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa" containerName="controller-manager" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.017711 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa" containerName="controller-manager" Dec 01 06:46:10 crc kubenswrapper[4632]: E1201 06:46:10.017721 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6be1d7b9-4a80-4b57-a4ac-71cf05c921f9" containerName="route-controller-manager" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.017727 4632 
state_mem.go:107] "Deleted CPUSet assignment" podUID="6be1d7b9-4a80-4b57-a4ac-71cf05c921f9" containerName="route-controller-manager" Dec 01 06:46:10 crc kubenswrapper[4632]: E1201 06:46:10.017736 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cba9c81-c125-4984-9e0f-0783b49b8bf6" containerName="extract-utilities" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.017741 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cba9c81-c125-4984-9e0f-0783b49b8bf6" containerName="extract-utilities" Dec 01 06:46:10 crc kubenswrapper[4632]: E1201 06:46:10.017747 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dac7bd07-63ed-44e6-9c05-cec09b9690e9" containerName="extract-content" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.017752 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="dac7bd07-63ed-44e6-9c05-cec09b9690e9" containerName="extract-content" Dec 01 06:46:10 crc kubenswrapper[4632]: E1201 06:46:10.017760 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cba9c81-c125-4984-9e0f-0783b49b8bf6" containerName="extract-content" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.017766 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cba9c81-c125-4984-9e0f-0783b49b8bf6" containerName="extract-content" Dec 01 06:46:10 crc kubenswrapper[4632]: E1201 06:46:10.017772 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39776e22-d730-44e7-b6a5-f7707cffb5af" containerName="extract-content" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.017777 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="39776e22-d730-44e7-b6a5-f7707cffb5af" containerName="extract-content" Dec 01 06:46:10 crc kubenswrapper[4632]: E1201 06:46:10.017784 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb" containerName="extract-utilities" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.017789 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb" containerName="extract-utilities" Dec 01 06:46:10 crc kubenswrapper[4632]: E1201 06:46:10.017798 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb" containerName="registry-server" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.017803 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb" containerName="registry-server" Dec 01 06:46:10 crc kubenswrapper[4632]: E1201 06:46:10.017810 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cba9c81-c125-4984-9e0f-0783b49b8bf6" containerName="registry-server" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.017815 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cba9c81-c125-4984-9e0f-0783b49b8bf6" containerName="registry-server" Dec 01 06:46:10 crc kubenswrapper[4632]: E1201 06:46:10.017822 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dac7bd07-63ed-44e6-9c05-cec09b9690e9" containerName="registry-server" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.017827 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="dac7bd07-63ed-44e6-9c05-cec09b9690e9" containerName="registry-server" Dec 01 06:46:10 crc kubenswrapper[4632]: E1201 06:46:10.017835 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39776e22-d730-44e7-b6a5-f7707cffb5af" containerName="registry-server" Dec 01 06:46:10 crc 
kubenswrapper[4632]: I1201 06:46:10.017840 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="39776e22-d730-44e7-b6a5-f7707cffb5af" containerName="registry-server" Dec 01 06:46:10 crc kubenswrapper[4632]: E1201 06:46:10.017847 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c9510b5-441a-4e44-bfcb-5feb7abc998a" containerName="pruner" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.017852 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c9510b5-441a-4e44-bfcb-5feb7abc998a" containerName="pruner" Dec 01 06:46:10 crc kubenswrapper[4632]: E1201 06:46:10.017859 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55ebde61-9ce4-4769-9326-fb97d8bf6648" containerName="pruner" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.017864 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="55ebde61-9ce4-4769-9326-fb97d8bf6648" containerName="pruner" Dec 01 06:46:10 crc kubenswrapper[4632]: E1201 06:46:10.017872 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39776e22-d730-44e7-b6a5-f7707cffb5af" containerName="extract-utilities" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.017878 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="39776e22-d730-44e7-b6a5-f7707cffb5af" containerName="extract-utilities" Dec 01 06:46:10 crc kubenswrapper[4632]: E1201 06:46:10.017883 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb" containerName="extract-content" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.017888 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb" containerName="extract-content" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.017973 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="dac7bd07-63ed-44e6-9c05-cec09b9690e9" containerName="registry-server" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.017982 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="39776e22-d730-44e7-b6a5-f7707cffb5af" containerName="registry-server" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.017990 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa" containerName="controller-manager" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.017998 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="55ebde61-9ce4-4769-9326-fb97d8bf6648" containerName="pruner" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.018006 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddf1a2a6-d01c-45d0-b8e1-6e75690f8feb" containerName="registry-server" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.018014 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="6be1d7b9-4a80-4b57-a4ac-71cf05c921f9" containerName="route-controller-manager" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.018021 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c9510b5-441a-4e44-bfcb-5feb7abc998a" containerName="pruner" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.018028 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cba9c81-c125-4984-9e0f-0783b49b8bf6" containerName="registry-server" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.018375 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-54764d6f49-2kbxl" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.020289 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.020517 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.020521 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.020590 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.020634 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.020670 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg"] Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.020520 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.021299 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.023503 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.023734 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.024293 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.024426 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.024547 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.024874 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.026639 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.031392 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-54764d6f49-2kbxl"] Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.033166 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg"] Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.147692 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/bc056355-cd57-4372-8f91-21ca2eeaf721-serving-cert\") pod \"controller-manager-54764d6f49-2kbxl\" (UID: \"bc056355-cd57-4372-8f91-21ca2eeaf721\") " pod="openshift-controller-manager/controller-manager-54764d6f49-2kbxl" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.147728 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bc056355-cd57-4372-8f91-21ca2eeaf721-proxy-ca-bundles\") pod \"controller-manager-54764d6f49-2kbxl\" (UID: \"bc056355-cd57-4372-8f91-21ca2eeaf721\") " pod="openshift-controller-manager/controller-manager-54764d6f49-2kbxl" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.147749 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc056355-cd57-4372-8f91-21ca2eeaf721-config\") pod \"controller-manager-54764d6f49-2kbxl\" (UID: \"bc056355-cd57-4372-8f91-21ca2eeaf721\") " pod="openshift-controller-manager/controller-manager-54764d6f49-2kbxl" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.147803 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8vcz\" (UniqueName: \"kubernetes.io/projected/821bb2a6-e9b4-4f5f-a654-705d7e503709-kube-api-access-v8vcz\") pod \"route-controller-manager-c47c9657f-b87qg\" (UID: \"821bb2a6-e9b4-4f5f-a654-705d7e503709\") " pod="openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.147823 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/821bb2a6-e9b4-4f5f-a654-705d7e503709-config\") pod \"route-controller-manager-c47c9657f-b87qg\" (UID: \"821bb2a6-e9b4-4f5f-a654-705d7e503709\") " pod="openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.147845 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bc056355-cd57-4372-8f91-21ca2eeaf721-client-ca\") pod \"controller-manager-54764d6f49-2kbxl\" (UID: \"bc056355-cd57-4372-8f91-21ca2eeaf721\") " pod="openshift-controller-manager/controller-manager-54764d6f49-2kbxl" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.147860 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/821bb2a6-e9b4-4f5f-a654-705d7e503709-client-ca\") pod \"route-controller-manager-c47c9657f-b87qg\" (UID: \"821bb2a6-e9b4-4f5f-a654-705d7e503709\") " pod="openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.147884 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/821bb2a6-e9b4-4f5f-a654-705d7e503709-serving-cert\") pod \"route-controller-manager-c47c9657f-b87qg\" (UID: \"821bb2a6-e9b4-4f5f-a654-705d7e503709\") " pod="openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.147903 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7ksj\" (UniqueName: 
\"kubernetes.io/projected/bc056355-cd57-4372-8f91-21ca2eeaf721-kube-api-access-z7ksj\") pod \"controller-manager-54764d6f49-2kbxl\" (UID: \"bc056355-cd57-4372-8f91-21ca2eeaf721\") " pod="openshift-controller-manager/controller-manager-54764d6f49-2kbxl" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.248322 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7ksj\" (UniqueName: \"kubernetes.io/projected/bc056355-cd57-4372-8f91-21ca2eeaf721-kube-api-access-z7ksj\") pod \"controller-manager-54764d6f49-2kbxl\" (UID: \"bc056355-cd57-4372-8f91-21ca2eeaf721\") " pod="openshift-controller-manager/controller-manager-54764d6f49-2kbxl" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.248363 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc056355-cd57-4372-8f91-21ca2eeaf721-serving-cert\") pod \"controller-manager-54764d6f49-2kbxl\" (UID: \"bc056355-cd57-4372-8f91-21ca2eeaf721\") " pod="openshift-controller-manager/controller-manager-54764d6f49-2kbxl" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.248385 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bc056355-cd57-4372-8f91-21ca2eeaf721-proxy-ca-bundles\") pod \"controller-manager-54764d6f49-2kbxl\" (UID: \"bc056355-cd57-4372-8f91-21ca2eeaf721\") " pod="openshift-controller-manager/controller-manager-54764d6f49-2kbxl" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.248403 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc056355-cd57-4372-8f91-21ca2eeaf721-config\") pod \"controller-manager-54764d6f49-2kbxl\" (UID: \"bc056355-cd57-4372-8f91-21ca2eeaf721\") " pod="openshift-controller-manager/controller-manager-54764d6f49-2kbxl" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.248453 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8vcz\" (UniqueName: \"kubernetes.io/projected/821bb2a6-e9b4-4f5f-a654-705d7e503709-kube-api-access-v8vcz\") pod \"route-controller-manager-c47c9657f-b87qg\" (UID: \"821bb2a6-e9b4-4f5f-a654-705d7e503709\") " pod="openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.248473 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/821bb2a6-e9b4-4f5f-a654-705d7e503709-config\") pod \"route-controller-manager-c47c9657f-b87qg\" (UID: \"821bb2a6-e9b4-4f5f-a654-705d7e503709\") " pod="openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.248495 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bc056355-cd57-4372-8f91-21ca2eeaf721-client-ca\") pod \"controller-manager-54764d6f49-2kbxl\" (UID: \"bc056355-cd57-4372-8f91-21ca2eeaf721\") " pod="openshift-controller-manager/controller-manager-54764d6f49-2kbxl" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.248512 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/821bb2a6-e9b4-4f5f-a654-705d7e503709-client-ca\") pod \"route-controller-manager-c47c9657f-b87qg\" (UID: 
\"821bb2a6-e9b4-4f5f-a654-705d7e503709\") " pod="openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.248530 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/821bb2a6-e9b4-4f5f-a654-705d7e503709-serving-cert\") pod \"route-controller-manager-c47c9657f-b87qg\" (UID: \"821bb2a6-e9b4-4f5f-a654-705d7e503709\") " pod="openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.249423 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bc056355-cd57-4372-8f91-21ca2eeaf721-client-ca\") pod \"controller-manager-54764d6f49-2kbxl\" (UID: \"bc056355-cd57-4372-8f91-21ca2eeaf721\") " pod="openshift-controller-manager/controller-manager-54764d6f49-2kbxl" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.249475 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bc056355-cd57-4372-8f91-21ca2eeaf721-proxy-ca-bundles\") pod \"controller-manager-54764d6f49-2kbxl\" (UID: \"bc056355-cd57-4372-8f91-21ca2eeaf721\") " pod="openshift-controller-manager/controller-manager-54764d6f49-2kbxl" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.249507 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/821bb2a6-e9b4-4f5f-a654-705d7e503709-client-ca\") pod \"route-controller-manager-c47c9657f-b87qg\" (UID: \"821bb2a6-e9b4-4f5f-a654-705d7e503709\") " pod="openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.249578 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/821bb2a6-e9b4-4f5f-a654-705d7e503709-config\") pod \"route-controller-manager-c47c9657f-b87qg\" (UID: \"821bb2a6-e9b4-4f5f-a654-705d7e503709\") " pod="openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.249783 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc056355-cd57-4372-8f91-21ca2eeaf721-config\") pod \"controller-manager-54764d6f49-2kbxl\" (UID: \"bc056355-cd57-4372-8f91-21ca2eeaf721\") " pod="openshift-controller-manager/controller-manager-54764d6f49-2kbxl" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.251769 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc056355-cd57-4372-8f91-21ca2eeaf721-serving-cert\") pod \"controller-manager-54764d6f49-2kbxl\" (UID: \"bc056355-cd57-4372-8f91-21ca2eeaf721\") " pod="openshift-controller-manager/controller-manager-54764d6f49-2kbxl" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.260741 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/821bb2a6-e9b4-4f5f-a654-705d7e503709-serving-cert\") pod \"route-controller-manager-c47c9657f-b87qg\" (UID: \"821bb2a6-e9b4-4f5f-a654-705d7e503709\") " pod="openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.262647 4632 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-z7ksj\" (UniqueName: \"kubernetes.io/projected/bc056355-cd57-4372-8f91-21ca2eeaf721-kube-api-access-z7ksj\") pod \"controller-manager-54764d6f49-2kbxl\" (UID: \"bc056355-cd57-4372-8f91-21ca2eeaf721\") " pod="openshift-controller-manager/controller-manager-54764d6f49-2kbxl" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.266002 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8vcz\" (UniqueName: \"kubernetes.io/projected/821bb2a6-e9b4-4f5f-a654-705d7e503709-kube-api-access-v8vcz\") pod \"route-controller-manager-c47c9657f-b87qg\" (UID: \"821bb2a6-e9b4-4f5f-a654-705d7e503709\") " pod="openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.331660 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-54764d6f49-2kbxl" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.336710 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.671018 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg"] Dec 01 06:46:10 crc kubenswrapper[4632]: W1201 06:46:10.677869 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod821bb2a6_e9b4_4f5f_a654_705d7e503709.slice/crio-610fd5ee680f0d2d7a095bf7226d722709681c96286274845bd51167effc2569 WatchSource:0}: Error finding container 610fd5ee680f0d2d7a095bf7226d722709681c96286274845bd51167effc2569: Status 404 returned error can't find the container with id 610fd5ee680f0d2d7a095bf7226d722709681c96286274845bd51167effc2569 Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.717542 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-54764d6f49-2kbxl"] Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.758720 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6be1d7b9-4a80-4b57-a4ac-71cf05c921f9" path="/var/lib/kubelet/pods/6be1d7b9-4a80-4b57-a4ac-71cf05c921f9/volumes" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.759193 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa" path="/var/lib/kubelet/pods/d6bc069b-ca4f-4736-b6ea-cb36d0e2cffa/volumes" Dec 01 06:46:10 crc kubenswrapper[4632]: I1201 06:46:10.759692 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dac7bd07-63ed-44e6-9c05-cec09b9690e9" path="/var/lib/kubelet/pods/dac7bd07-63ed-44e6-9c05-cec09b9690e9/volumes" Dec 01 06:46:11 crc kubenswrapper[4632]: I1201 06:46:11.251979 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg" event={"ID":"821bb2a6-e9b4-4f5f-a654-705d7e503709","Type":"ContainerStarted","Data":"26400a3ccfe858b9190589361a4541a34973f7acede7903fe42dc8ba91522ff6"} Dec 01 06:46:11 crc kubenswrapper[4632]: I1201 06:46:11.252233 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg" 
event={"ID":"821bb2a6-e9b4-4f5f-a654-705d7e503709","Type":"ContainerStarted","Data":"610fd5ee680f0d2d7a095bf7226d722709681c96286274845bd51167effc2569"} Dec 01 06:46:11 crc kubenswrapper[4632]: I1201 06:46:11.252480 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg" Dec 01 06:46:11 crc kubenswrapper[4632]: I1201 06:46:11.253268 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-54764d6f49-2kbxl" event={"ID":"bc056355-cd57-4372-8f91-21ca2eeaf721","Type":"ContainerStarted","Data":"f0e39864dc78758109c137ca1ab4bade3a663a06b397aa80e0cec67909e61132"} Dec 01 06:46:11 crc kubenswrapper[4632]: I1201 06:46:11.253325 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-54764d6f49-2kbxl" event={"ID":"bc056355-cd57-4372-8f91-21ca2eeaf721","Type":"ContainerStarted","Data":"90bc4c094dbd004a50df82c7d06929ca38eaa7596674f7a031e8b377d1748e77"} Dec 01 06:46:11 crc kubenswrapper[4632]: I1201 06:46:11.253685 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-54764d6f49-2kbxl" Dec 01 06:46:11 crc kubenswrapper[4632]: I1201 06:46:11.257557 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-54764d6f49-2kbxl" Dec 01 06:46:11 crc kubenswrapper[4632]: I1201 06:46:11.258712 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg" Dec 01 06:46:11 crc kubenswrapper[4632]: I1201 06:46:11.267753 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg" podStartSLOduration=3.267744501 podStartE2EDuration="3.267744501s" podCreationTimestamp="2025-12-01 06:46:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:46:11.264483033 +0000 UTC m=+180.829496006" watchObservedRunningTime="2025-12-01 06:46:11.267744501 +0000 UTC m=+180.832757474" Dec 01 06:46:11 crc kubenswrapper[4632]: I1201 06:46:11.280643 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-54764d6f49-2kbxl" podStartSLOduration=3.280631715 podStartE2EDuration="3.280631715s" podCreationTimestamp="2025-12-01 06:46:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:46:11.279553398 +0000 UTC m=+180.844566371" watchObservedRunningTime="2025-12-01 06:46:11.280631715 +0000 UTC m=+180.845644688" Dec 01 06:46:13 crc kubenswrapper[4632]: I1201 06:46:13.329387 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 01 06:46:13 crc kubenswrapper[4632]: I1201 06:46:13.330397 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 06:46:13 crc kubenswrapper[4632]: I1201 06:46:13.332514 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 01 06:46:13 crc kubenswrapper[4632]: I1201 06:46:13.332705 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 01 06:46:13 crc kubenswrapper[4632]: I1201 06:46:13.337733 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 01 06:46:13 crc kubenswrapper[4632]: I1201 06:46:13.483295 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 06:46:13 crc kubenswrapper[4632]: I1201 06:46:13.483397 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 06:46:13 crc kubenswrapper[4632]: I1201 06:46:13.585023 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 06:46:13 crc kubenswrapper[4632]: I1201 06:46:13.585115 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 06:46:13 crc kubenswrapper[4632]: I1201 06:46:13.585179 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 06:46:13 crc kubenswrapper[4632]: I1201 06:46:13.609771 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 06:46:13 crc kubenswrapper[4632]: I1201 06:46:13.643320 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 06:46:13 crc kubenswrapper[4632]: I1201 06:46:13.995840 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 01 06:46:14 crc kubenswrapper[4632]: I1201 06:46:14.268621 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea","Type":"ContainerStarted","Data":"ff2588e241cae601c37e74c2d441c6fa104c55cb74e7f9d7e992e90e7d703b47"} Dec 01 06:46:14 crc kubenswrapper[4632]: I1201 06:46:14.268923 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea","Type":"ContainerStarted","Data":"d554f68d94f6699f7ee0ab4f00c823720ac4875d887f0fe1c13732998c6496b6"} Dec 01 06:46:14 crc kubenswrapper[4632]: I1201 06:46:14.282715 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=1.282695603 podStartE2EDuration="1.282695603s" podCreationTimestamp="2025-12-01 06:46:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:46:14.280454251 +0000 UTC m=+183.845467224" watchObservedRunningTime="2025-12-01 06:46:14.282695603 +0000 UTC m=+183.847708575" Dec 01 06:46:15 crc kubenswrapper[4632]: I1201 06:46:15.275685 4632 generic.go:334] "Generic (PLEG): container finished" podID="e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea" containerID="ff2588e241cae601c37e74c2d441c6fa104c55cb74e7f9d7e992e90e7d703b47" exitCode=0 Dec 01 06:46:15 crc kubenswrapper[4632]: I1201 06:46:15.275728 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea","Type":"ContainerDied","Data":"ff2588e241cae601c37e74c2d441c6fa104c55cb74e7f9d7e992e90e7d703b47"} Dec 01 06:46:15 crc kubenswrapper[4632]: I1201 06:46:15.415946 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lw8pk"] Dec 01 06:46:16 crc kubenswrapper[4632]: I1201 06:46:16.512630 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 06:46:16 crc kubenswrapper[4632]: I1201 06:46:16.626045 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea-kube-api-access\") pod \"e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea\" (UID: \"e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea\") " Dec 01 06:46:16 crc kubenswrapper[4632]: I1201 06:46:16.626228 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea-kubelet-dir\") pod \"e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea\" (UID: \"e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea\") " Dec 01 06:46:16 crc kubenswrapper[4632]: I1201 06:46:16.626370 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea" (UID: "e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:46:16 crc kubenswrapper[4632]: I1201 06:46:16.626616 4632 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:16 crc kubenswrapper[4632]: I1201 06:46:16.631811 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea" (UID: "e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:46:16 crc kubenswrapper[4632]: I1201 06:46:16.727849 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:16 crc kubenswrapper[4632]: I1201 06:46:16.866088 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 01 06:46:17 crc kubenswrapper[4632]: I1201 06:46:17.288573 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea","Type":"ContainerDied","Data":"d554f68d94f6699f7ee0ab4f00c823720ac4875d887f0fe1c13732998c6496b6"} Dec 01 06:46:17 crc kubenswrapper[4632]: I1201 06:46:17.288639 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d554f68d94f6699f7ee0ab4f00c823720ac4875d887f0fe1c13732998c6496b6" Dec 01 06:46:17 crc kubenswrapper[4632]: I1201 06:46:17.288645 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 01 06:46:19 crc kubenswrapper[4632]: I1201 06:46:19.498066 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 06:46:19 crc kubenswrapper[4632]: I1201 06:46:19.498377 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 06:46:20 crc kubenswrapper[4632]: I1201 06:46:20.729071 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 01 06:46:20 crc kubenswrapper[4632]: E1201 06:46:20.729252 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea" containerName="pruner" Dec 01 06:46:20 crc kubenswrapper[4632]: I1201 06:46:20.729263 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea" containerName="pruner" Dec 01 06:46:20 crc kubenswrapper[4632]: I1201 06:46:20.729393 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="e27064c7-5fc5-4a2b-9c28-cd0a43cb2bea" containerName="pruner" Dec 01 06:46:20 crc kubenswrapper[4632]: I1201 06:46:20.729758 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 01 06:46:20 crc kubenswrapper[4632]: I1201 06:46:20.732757 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 01 06:46:20 crc kubenswrapper[4632]: I1201 06:46:20.734387 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 01 06:46:20 crc kubenswrapper[4632]: I1201 06:46:20.744883 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 01 06:46:20 crc kubenswrapper[4632]: I1201 06:46:20.888145 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/06aced22-ea91-4173-98a5-df8d91ebe64d-kube-api-access\") pod \"installer-9-crc\" (UID: \"06aced22-ea91-4173-98a5-df8d91ebe64d\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 06:46:20 crc kubenswrapper[4632]: I1201 06:46:20.888194 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/06aced22-ea91-4173-98a5-df8d91ebe64d-kubelet-dir\") pod \"installer-9-crc\" (UID: \"06aced22-ea91-4173-98a5-df8d91ebe64d\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 06:46:20 crc kubenswrapper[4632]: I1201 06:46:20.888443 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/06aced22-ea91-4173-98a5-df8d91ebe64d-var-lock\") pod \"installer-9-crc\" (UID: \"06aced22-ea91-4173-98a5-df8d91ebe64d\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 06:46:20 crc kubenswrapper[4632]: I1201 06:46:20.990176 4632 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/06aced22-ea91-4173-98a5-df8d91ebe64d-kube-api-access\") pod \"installer-9-crc\" (UID: \"06aced22-ea91-4173-98a5-df8d91ebe64d\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 06:46:20 crc kubenswrapper[4632]: I1201 06:46:20.990230 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/06aced22-ea91-4173-98a5-df8d91ebe64d-kubelet-dir\") pod \"installer-9-crc\" (UID: \"06aced22-ea91-4173-98a5-df8d91ebe64d\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 06:46:20 crc kubenswrapper[4632]: I1201 06:46:20.990328 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/06aced22-ea91-4173-98a5-df8d91ebe64d-var-lock\") pod \"installer-9-crc\" (UID: \"06aced22-ea91-4173-98a5-df8d91ebe64d\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 06:46:20 crc kubenswrapper[4632]: I1201 06:46:20.990417 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/06aced22-ea91-4173-98a5-df8d91ebe64d-var-lock\") pod \"installer-9-crc\" (UID: \"06aced22-ea91-4173-98a5-df8d91ebe64d\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 06:46:20 crc kubenswrapper[4632]: I1201 06:46:20.990546 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/06aced22-ea91-4173-98a5-df8d91ebe64d-kubelet-dir\") pod \"installer-9-crc\" (UID: \"06aced22-ea91-4173-98a5-df8d91ebe64d\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 06:46:21 crc kubenswrapper[4632]: I1201 06:46:21.005676 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/06aced22-ea91-4173-98a5-df8d91ebe64d-kube-api-access\") pod \"installer-9-crc\" (UID: \"06aced22-ea91-4173-98a5-df8d91ebe64d\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 01 06:46:21 crc kubenswrapper[4632]: I1201 06:46:21.043804 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 01 06:46:21 crc kubenswrapper[4632]: I1201 06:46:21.408054 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 01 06:46:21 crc kubenswrapper[4632]: W1201 06:46:21.419179 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod06aced22_ea91_4173_98a5_df8d91ebe64d.slice/crio-cb5bf7a2c5b93a74a0685ee03188defb31df3a181dfedd5a9df241d83cea4e89 WatchSource:0}: Error finding container cb5bf7a2c5b93a74a0685ee03188defb31df3a181dfedd5a9df241d83cea4e89: Status 404 returned error can't find the container with id cb5bf7a2c5b93a74a0685ee03188defb31df3a181dfedd5a9df241d83cea4e89 Dec 01 06:46:22 crc kubenswrapper[4632]: I1201 06:46:22.318530 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"06aced22-ea91-4173-98a5-df8d91ebe64d","Type":"ContainerStarted","Data":"024e52da2a83419a3730e9f0f4b488118e1ffe5b08e73a4b0cb78d1658a3b588"} Dec 01 06:46:22 crc kubenswrapper[4632]: I1201 06:46:22.318851 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"06aced22-ea91-4173-98a5-df8d91ebe64d","Type":"ContainerStarted","Data":"cb5bf7a2c5b93a74a0685ee03188defb31df3a181dfedd5a9df241d83cea4e89"} Dec 01 06:46:22 crc kubenswrapper[4632]: I1201 06:46:22.332817 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=2.332798591 podStartE2EDuration="2.332798591s" podCreationTimestamp="2025-12-01 06:46:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:46:22.330285646 +0000 UTC m=+191.895298619" watchObservedRunningTime="2025-12-01 06:46:22.332798591 +0000 UTC m=+191.897811564" Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.210081 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-54764d6f49-2kbxl"] Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.211673 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-54764d6f49-2kbxl" podUID="bc056355-cd57-4372-8f91-21ca2eeaf721" containerName="controller-manager" containerID="cri-o://f0e39864dc78758109c137ca1ab4bade3a663a06b397aa80e0cec67909e61132" gracePeriod=30 Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.224066 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg"] Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.224284 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg" podUID="821bb2a6-e9b4-4f5f-a654-705d7e503709" containerName="route-controller-manager" containerID="cri-o://26400a3ccfe858b9190589361a4541a34973f7acede7903fe42dc8ba91522ff6" gracePeriod=30 Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.349306 4632 generic.go:334] "Generic (PLEG): container finished" podID="821bb2a6-e9b4-4f5f-a654-705d7e503709" containerID="26400a3ccfe858b9190589361a4541a34973f7acede7903fe42dc8ba91522ff6" exitCode=0 Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.349516 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg" event={"ID":"821bb2a6-e9b4-4f5f-a654-705d7e503709","Type":"ContainerDied","Data":"26400a3ccfe858b9190589361a4541a34973f7acede7903fe42dc8ba91522ff6"} Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.351160 4632 generic.go:334] "Generic (PLEG): container finished" podID="bc056355-cd57-4372-8f91-21ca2eeaf721" containerID="f0e39864dc78758109c137ca1ab4bade3a663a06b397aa80e0cec67909e61132" exitCode=0 Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.351196 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-54764d6f49-2kbxl" event={"ID":"bc056355-cd57-4372-8f91-21ca2eeaf721","Type":"ContainerDied","Data":"f0e39864dc78758109c137ca1ab4bade3a663a06b397aa80e0cec67909e61132"} Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.645030 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg" Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.724611 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-54764d6f49-2kbxl" Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.798968 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/821bb2a6-e9b4-4f5f-a654-705d7e503709-config\") pod \"821bb2a6-e9b4-4f5f-a654-705d7e503709\" (UID: \"821bb2a6-e9b4-4f5f-a654-705d7e503709\") " Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.799020 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bc056355-cd57-4372-8f91-21ca2eeaf721-client-ca\") pod \"bc056355-cd57-4372-8f91-21ca2eeaf721\" (UID: \"bc056355-cd57-4372-8f91-21ca2eeaf721\") " Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.799057 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z7ksj\" (UniqueName: \"kubernetes.io/projected/bc056355-cd57-4372-8f91-21ca2eeaf721-kube-api-access-z7ksj\") pod \"bc056355-cd57-4372-8f91-21ca2eeaf721\" (UID: \"bc056355-cd57-4372-8f91-21ca2eeaf721\") " Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.799111 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc056355-cd57-4372-8f91-21ca2eeaf721-config\") pod \"bc056355-cd57-4372-8f91-21ca2eeaf721\" (UID: \"bc056355-cd57-4372-8f91-21ca2eeaf721\") " Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.799146 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bc056355-cd57-4372-8f91-21ca2eeaf721-proxy-ca-bundles\") pod \"bc056355-cd57-4372-8f91-21ca2eeaf721\" (UID: \"bc056355-cd57-4372-8f91-21ca2eeaf721\") " Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.799172 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/821bb2a6-e9b4-4f5f-a654-705d7e503709-serving-cert\") pod \"821bb2a6-e9b4-4f5f-a654-705d7e503709\" (UID: \"821bb2a6-e9b4-4f5f-a654-705d7e503709\") " Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.799190 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v8vcz\" (UniqueName: 
\"kubernetes.io/projected/821bb2a6-e9b4-4f5f-a654-705d7e503709-kube-api-access-v8vcz\") pod \"821bb2a6-e9b4-4f5f-a654-705d7e503709\" (UID: \"821bb2a6-e9b4-4f5f-a654-705d7e503709\") " Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.799213 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/821bb2a6-e9b4-4f5f-a654-705d7e503709-client-ca\") pod \"821bb2a6-e9b4-4f5f-a654-705d7e503709\" (UID: \"821bb2a6-e9b4-4f5f-a654-705d7e503709\") " Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.799248 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc056355-cd57-4372-8f91-21ca2eeaf721-serving-cert\") pod \"bc056355-cd57-4372-8f91-21ca2eeaf721\" (UID: \"bc056355-cd57-4372-8f91-21ca2eeaf721\") " Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.799986 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc056355-cd57-4372-8f91-21ca2eeaf721-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "bc056355-cd57-4372-8f91-21ca2eeaf721" (UID: "bc056355-cd57-4372-8f91-21ca2eeaf721"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.800034 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/821bb2a6-e9b4-4f5f-a654-705d7e503709-client-ca" (OuterVolumeSpecName: "client-ca") pod "821bb2a6-e9b4-4f5f-a654-705d7e503709" (UID: "821bb2a6-e9b4-4f5f-a654-705d7e503709"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.800312 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc056355-cd57-4372-8f91-21ca2eeaf721-config" (OuterVolumeSpecName: "config") pod "bc056355-cd57-4372-8f91-21ca2eeaf721" (UID: "bc056355-cd57-4372-8f91-21ca2eeaf721"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.800486 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/821bb2a6-e9b4-4f5f-a654-705d7e503709-config" (OuterVolumeSpecName: "config") pod "821bb2a6-e9b4-4f5f-a654-705d7e503709" (UID: "821bb2a6-e9b4-4f5f-a654-705d7e503709"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.800487 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc056355-cd57-4372-8f91-21ca2eeaf721-client-ca" (OuterVolumeSpecName: "client-ca") pod "bc056355-cd57-4372-8f91-21ca2eeaf721" (UID: "bc056355-cd57-4372-8f91-21ca2eeaf721"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.803905 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/821bb2a6-e9b4-4f5f-a654-705d7e503709-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "821bb2a6-e9b4-4f5f-a654-705d7e503709" (UID: "821bb2a6-e9b4-4f5f-a654-705d7e503709"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.803922 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc056355-cd57-4372-8f91-21ca2eeaf721-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc056355-cd57-4372-8f91-21ca2eeaf721" (UID: "bc056355-cd57-4372-8f91-21ca2eeaf721"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.803999 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/821bb2a6-e9b4-4f5f-a654-705d7e503709-kube-api-access-v8vcz" (OuterVolumeSpecName: "kube-api-access-v8vcz") pod "821bb2a6-e9b4-4f5f-a654-705d7e503709" (UID: "821bb2a6-e9b4-4f5f-a654-705d7e503709"). InnerVolumeSpecName "kube-api-access-v8vcz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.804400 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc056355-cd57-4372-8f91-21ca2eeaf721-kube-api-access-z7ksj" (OuterVolumeSpecName: "kube-api-access-z7ksj") pod "bc056355-cd57-4372-8f91-21ca2eeaf721" (UID: "bc056355-cd57-4372-8f91-21ca2eeaf721"). InnerVolumeSpecName "kube-api-access-z7ksj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.901238 4632 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/bc056355-cd57-4372-8f91-21ca2eeaf721-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.901267 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z7ksj\" (UniqueName: \"kubernetes.io/projected/bc056355-cd57-4372-8f91-21ca2eeaf721-kube-api-access-z7ksj\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.901276 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc056355-cd57-4372-8f91-21ca2eeaf721-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.901285 4632 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/bc056355-cd57-4372-8f91-21ca2eeaf721-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.901293 4632 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/821bb2a6-e9b4-4f5f-a654-705d7e503709-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.901301 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v8vcz\" (UniqueName: \"kubernetes.io/projected/821bb2a6-e9b4-4f5f-a654-705d7e503709-kube-api-access-v8vcz\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.901309 4632 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/821bb2a6-e9b4-4f5f-a654-705d7e503709-client-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:28 crc kubenswrapper[4632]: I1201 06:46:28.901317 4632 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc056355-cd57-4372-8f91-21ca2eeaf721-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:28 crc 
kubenswrapper[4632]: I1201 06:46:28.901325 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/821bb2a6-e9b4-4f5f-a654-705d7e503709-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:29 crc kubenswrapper[4632]: I1201 06:46:29.359097 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg" event={"ID":"821bb2a6-e9b4-4f5f-a654-705d7e503709","Type":"ContainerDied","Data":"610fd5ee680f0d2d7a095bf7226d722709681c96286274845bd51167effc2569"} Dec 01 06:46:29 crc kubenswrapper[4632]: I1201 06:46:29.359132 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg" Dec 01 06:46:29 crc kubenswrapper[4632]: I1201 06:46:29.360106 4632 scope.go:117] "RemoveContainer" containerID="26400a3ccfe858b9190589361a4541a34973f7acede7903fe42dc8ba91522ff6" Dec 01 06:46:29 crc kubenswrapper[4632]: I1201 06:46:29.360551 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-54764d6f49-2kbxl" event={"ID":"bc056355-cd57-4372-8f91-21ca2eeaf721","Type":"ContainerDied","Data":"90bc4c094dbd004a50df82c7d06929ca38eaa7596674f7a031e8b377d1748e77"} Dec 01 06:46:29 crc kubenswrapper[4632]: I1201 06:46:29.360644 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-54764d6f49-2kbxl" Dec 01 06:46:29 crc kubenswrapper[4632]: I1201 06:46:29.374412 4632 scope.go:117] "RemoveContainer" containerID="f0e39864dc78758109c137ca1ab4bade3a663a06b397aa80e0cec67909e61132" Dec 01 06:46:29 crc kubenswrapper[4632]: I1201 06:46:29.395686 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-54764d6f49-2kbxl"] Dec 01 06:46:29 crc kubenswrapper[4632]: I1201 06:46:29.397452 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-54764d6f49-2kbxl"] Dec 01 06:46:29 crc kubenswrapper[4632]: I1201 06:46:29.404632 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg"] Dec 01 06:46:29 crc kubenswrapper[4632]: I1201 06:46:29.407576 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-c47c9657f-b87qg"] Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.036799 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-58b6bc6fbf-nk97z"] Dec 01 06:46:30 crc kubenswrapper[4632]: E1201 06:46:30.037061 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc056355-cd57-4372-8f91-21ca2eeaf721" containerName="controller-manager" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.037080 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc056355-cd57-4372-8f91-21ca2eeaf721" containerName="controller-manager" Dec 01 06:46:30 crc kubenswrapper[4632]: E1201 06:46:30.037102 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="821bb2a6-e9b4-4f5f-a654-705d7e503709" containerName="route-controller-manager" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.037109 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="821bb2a6-e9b4-4f5f-a654-705d7e503709" containerName="route-controller-manager" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 
06:46:30.037208 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="821bb2a6-e9b4-4f5f-a654-705d7e503709" containerName="route-controller-manager" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.037220 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc056355-cd57-4372-8f91-21ca2eeaf721" containerName="controller-manager" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.037631 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-58b6bc6fbf-nk97z" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.038813 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f8b7977ff-gdz2t"] Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.044734 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f8b7977ff-gdz2t" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.047966 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.049677 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.052491 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.052546 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.052686 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.052922 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.053011 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.054159 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.054483 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.054599 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.054747 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.056933 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.057841 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-58b6bc6fbf-nk97z"] Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.059764 4632 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.059922 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f8b7977ff-gdz2t"] Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.116861 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9vcb\" (UniqueName: \"kubernetes.io/projected/9e9b247f-a825-4c22-a5f4-73510c6c1c70-kube-api-access-j9vcb\") pod \"route-controller-manager-7f8b7977ff-gdz2t\" (UID: \"9e9b247f-a825-4c22-a5f4-73510c6c1c70\") " pod="openshift-route-controller-manager/route-controller-manager-7f8b7977ff-gdz2t" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.116903 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9ed917d8-1bad-4da5-8638-f3a164d7abae-proxy-ca-bundles\") pod \"controller-manager-58b6bc6fbf-nk97z\" (UID: \"9ed917d8-1bad-4da5-8638-f3a164d7abae\") " pod="openshift-controller-manager/controller-manager-58b6bc6fbf-nk97z" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.116928 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mmnt\" (UniqueName: \"kubernetes.io/projected/9ed917d8-1bad-4da5-8638-f3a164d7abae-kube-api-access-9mmnt\") pod \"controller-manager-58b6bc6fbf-nk97z\" (UID: \"9ed917d8-1bad-4da5-8638-f3a164d7abae\") " pod="openshift-controller-manager/controller-manager-58b6bc6fbf-nk97z" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.116955 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9e9b247f-a825-4c22-a5f4-73510c6c1c70-client-ca\") pod \"route-controller-manager-7f8b7977ff-gdz2t\" (UID: \"9e9b247f-a825-4c22-a5f4-73510c6c1c70\") " pod="openshift-route-controller-manager/route-controller-manager-7f8b7977ff-gdz2t" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.116980 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ed917d8-1bad-4da5-8638-f3a164d7abae-config\") pod \"controller-manager-58b6bc6fbf-nk97z\" (UID: \"9ed917d8-1bad-4da5-8638-f3a164d7abae\") " pod="openshift-controller-manager/controller-manager-58b6bc6fbf-nk97z" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.117122 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9ed917d8-1bad-4da5-8638-f3a164d7abae-serving-cert\") pod \"controller-manager-58b6bc6fbf-nk97z\" (UID: \"9ed917d8-1bad-4da5-8638-f3a164d7abae\") " pod="openshift-controller-manager/controller-manager-58b6bc6fbf-nk97z" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.117181 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9ed917d8-1bad-4da5-8638-f3a164d7abae-client-ca\") pod \"controller-manager-58b6bc6fbf-nk97z\" (UID: \"9ed917d8-1bad-4da5-8638-f3a164d7abae\") " pod="openshift-controller-manager/controller-manager-58b6bc6fbf-nk97z" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.117232 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9e9b247f-a825-4c22-a5f4-73510c6c1c70-serving-cert\") pod \"route-controller-manager-7f8b7977ff-gdz2t\" (UID: \"9e9b247f-a825-4c22-a5f4-73510c6c1c70\") " pod="openshift-route-controller-manager/route-controller-manager-7f8b7977ff-gdz2t" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.117322 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e9b247f-a825-4c22-a5f4-73510c6c1c70-config\") pod \"route-controller-manager-7f8b7977ff-gdz2t\" (UID: \"9e9b247f-a825-4c22-a5f4-73510c6c1c70\") " pod="openshift-route-controller-manager/route-controller-manager-7f8b7977ff-gdz2t" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.218572 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9e9b247f-a825-4c22-a5f4-73510c6c1c70-serving-cert\") pod \"route-controller-manager-7f8b7977ff-gdz2t\" (UID: \"9e9b247f-a825-4c22-a5f4-73510c6c1c70\") " pod="openshift-route-controller-manager/route-controller-manager-7f8b7977ff-gdz2t" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.218641 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e9b247f-a825-4c22-a5f4-73510c6c1c70-config\") pod \"route-controller-manager-7f8b7977ff-gdz2t\" (UID: \"9e9b247f-a825-4c22-a5f4-73510c6c1c70\") " pod="openshift-route-controller-manager/route-controller-manager-7f8b7977ff-gdz2t" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.218683 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9vcb\" (UniqueName: \"kubernetes.io/projected/9e9b247f-a825-4c22-a5f4-73510c6c1c70-kube-api-access-j9vcb\") pod \"route-controller-manager-7f8b7977ff-gdz2t\" (UID: \"9e9b247f-a825-4c22-a5f4-73510c6c1c70\") " pod="openshift-route-controller-manager/route-controller-manager-7f8b7977ff-gdz2t" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.218703 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9ed917d8-1bad-4da5-8638-f3a164d7abae-proxy-ca-bundles\") pod \"controller-manager-58b6bc6fbf-nk97z\" (UID: \"9ed917d8-1bad-4da5-8638-f3a164d7abae\") " pod="openshift-controller-manager/controller-manager-58b6bc6fbf-nk97z" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.218725 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mmnt\" (UniqueName: \"kubernetes.io/projected/9ed917d8-1bad-4da5-8638-f3a164d7abae-kube-api-access-9mmnt\") pod \"controller-manager-58b6bc6fbf-nk97z\" (UID: \"9ed917d8-1bad-4da5-8638-f3a164d7abae\") " pod="openshift-controller-manager/controller-manager-58b6bc6fbf-nk97z" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.218753 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9e9b247f-a825-4c22-a5f4-73510c6c1c70-client-ca\") pod \"route-controller-manager-7f8b7977ff-gdz2t\" (UID: \"9e9b247f-a825-4c22-a5f4-73510c6c1c70\") " pod="openshift-route-controller-manager/route-controller-manager-7f8b7977ff-gdz2t" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.218774 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/9ed917d8-1bad-4da5-8638-f3a164d7abae-config\") pod \"controller-manager-58b6bc6fbf-nk97z\" (UID: \"9ed917d8-1bad-4da5-8638-f3a164d7abae\") " pod="openshift-controller-manager/controller-manager-58b6bc6fbf-nk97z" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.218809 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9ed917d8-1bad-4da5-8638-f3a164d7abae-serving-cert\") pod \"controller-manager-58b6bc6fbf-nk97z\" (UID: \"9ed917d8-1bad-4da5-8638-f3a164d7abae\") " pod="openshift-controller-manager/controller-manager-58b6bc6fbf-nk97z" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.218835 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9ed917d8-1bad-4da5-8638-f3a164d7abae-client-ca\") pod \"controller-manager-58b6bc6fbf-nk97z\" (UID: \"9ed917d8-1bad-4da5-8638-f3a164d7abae\") " pod="openshift-controller-manager/controller-manager-58b6bc6fbf-nk97z" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.219996 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9ed917d8-1bad-4da5-8638-f3a164d7abae-client-ca\") pod \"controller-manager-58b6bc6fbf-nk97z\" (UID: \"9ed917d8-1bad-4da5-8638-f3a164d7abae\") " pod="openshift-controller-manager/controller-manager-58b6bc6fbf-nk97z" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.220707 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/9ed917d8-1bad-4da5-8638-f3a164d7abae-proxy-ca-bundles\") pod \"controller-manager-58b6bc6fbf-nk97z\" (UID: \"9ed917d8-1bad-4da5-8638-f3a164d7abae\") " pod="openshift-controller-manager/controller-manager-58b6bc6fbf-nk97z" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.220773 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e9b247f-a825-4c22-a5f4-73510c6c1c70-config\") pod \"route-controller-manager-7f8b7977ff-gdz2t\" (UID: \"9e9b247f-a825-4c22-a5f4-73510c6c1c70\") " pod="openshift-route-controller-manager/route-controller-manager-7f8b7977ff-gdz2t" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.220978 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9e9b247f-a825-4c22-a5f4-73510c6c1c70-client-ca\") pod \"route-controller-manager-7f8b7977ff-gdz2t\" (UID: \"9e9b247f-a825-4c22-a5f4-73510c6c1c70\") " pod="openshift-route-controller-manager/route-controller-manager-7f8b7977ff-gdz2t" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.221489 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9ed917d8-1bad-4da5-8638-f3a164d7abae-config\") pod \"controller-manager-58b6bc6fbf-nk97z\" (UID: \"9ed917d8-1bad-4da5-8638-f3a164d7abae\") " pod="openshift-controller-manager/controller-manager-58b6bc6fbf-nk97z" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.223161 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9ed917d8-1bad-4da5-8638-f3a164d7abae-serving-cert\") pod \"controller-manager-58b6bc6fbf-nk97z\" (UID: \"9ed917d8-1bad-4da5-8638-f3a164d7abae\") " pod="openshift-controller-manager/controller-manager-58b6bc6fbf-nk97z" Dec 01 06:46:30 crc 
kubenswrapper[4632]: I1201 06:46:30.224647 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9e9b247f-a825-4c22-a5f4-73510c6c1c70-serving-cert\") pod \"route-controller-manager-7f8b7977ff-gdz2t\" (UID: \"9e9b247f-a825-4c22-a5f4-73510c6c1c70\") " pod="openshift-route-controller-manager/route-controller-manager-7f8b7977ff-gdz2t" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.233287 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mmnt\" (UniqueName: \"kubernetes.io/projected/9ed917d8-1bad-4da5-8638-f3a164d7abae-kube-api-access-9mmnt\") pod \"controller-manager-58b6bc6fbf-nk97z\" (UID: \"9ed917d8-1bad-4da5-8638-f3a164d7abae\") " pod="openshift-controller-manager/controller-manager-58b6bc6fbf-nk97z" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.234407 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9vcb\" (UniqueName: \"kubernetes.io/projected/9e9b247f-a825-4c22-a5f4-73510c6c1c70-kube-api-access-j9vcb\") pod \"route-controller-manager-7f8b7977ff-gdz2t\" (UID: \"9e9b247f-a825-4c22-a5f4-73510c6c1c70\") " pod="openshift-route-controller-manager/route-controller-manager-7f8b7977ff-gdz2t" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.363827 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-58b6bc6fbf-nk97z" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.370220 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f8b7977ff-gdz2t" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.722793 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-58b6bc6fbf-nk97z"] Dec 01 06:46:30 crc kubenswrapper[4632]: W1201 06:46:30.727850 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9ed917d8_1bad_4da5_8638_f3a164d7abae.slice/crio-06296a697b25fb1b412bc36c70b37f7f6282e5fc0ed6c27bdb0f15f6dbb58758 WatchSource:0}: Error finding container 06296a697b25fb1b412bc36c70b37f7f6282e5fc0ed6c27bdb0f15f6dbb58758: Status 404 returned error can't find the container with id 06296a697b25fb1b412bc36c70b37f7f6282e5fc0ed6c27bdb0f15f6dbb58758 Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.763917 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="821bb2a6-e9b4-4f5f-a654-705d7e503709" path="/var/lib/kubelet/pods/821bb2a6-e9b4-4f5f-a654-705d7e503709/volumes" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.764630 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc056355-cd57-4372-8f91-21ca2eeaf721" path="/var/lib/kubelet/pods/bc056355-cd57-4372-8f91-21ca2eeaf721/volumes" Dec 01 06:46:30 crc kubenswrapper[4632]: I1201 06:46:30.765060 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f8b7977ff-gdz2t"] Dec 01 06:46:30 crc kubenswrapper[4632]: W1201 06:46:30.777168 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9e9b247f_a825_4c22_a5f4_73510c6c1c70.slice/crio-60f8aee56f3df0b39af933a94304c027735bd56eaa86ef8e9720020022ecb6b8 WatchSource:0}: Error finding container 
60f8aee56f3df0b39af933a94304c027735bd56eaa86ef8e9720020022ecb6b8: Status 404 returned error can't find the container with id 60f8aee56f3df0b39af933a94304c027735bd56eaa86ef8e9720020022ecb6b8 Dec 01 06:46:31 crc kubenswrapper[4632]: I1201 06:46:31.380716 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-58b6bc6fbf-nk97z" event={"ID":"9ed917d8-1bad-4da5-8638-f3a164d7abae","Type":"ContainerStarted","Data":"5779334b89233e5393b15308704ac7845c5a34f1b8e549a40f27b0d7ee856f03"} Dec 01 06:46:31 crc kubenswrapper[4632]: I1201 06:46:31.380966 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-58b6bc6fbf-nk97z" event={"ID":"9ed917d8-1bad-4da5-8638-f3a164d7abae","Type":"ContainerStarted","Data":"06296a697b25fb1b412bc36c70b37f7f6282e5fc0ed6c27bdb0f15f6dbb58758"} Dec 01 06:46:31 crc kubenswrapper[4632]: I1201 06:46:31.382097 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-58b6bc6fbf-nk97z" Dec 01 06:46:31 crc kubenswrapper[4632]: I1201 06:46:31.383732 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f8b7977ff-gdz2t" event={"ID":"9e9b247f-a825-4c22-a5f4-73510c6c1c70","Type":"ContainerStarted","Data":"134fc844a8afa70680467055c9324275e34978cf34f729e944757e16c7733404"} Dec 01 06:46:31 crc kubenswrapper[4632]: I1201 06:46:31.383789 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f8b7977ff-gdz2t" event={"ID":"9e9b247f-a825-4c22-a5f4-73510c6c1c70","Type":"ContainerStarted","Data":"60f8aee56f3df0b39af933a94304c027735bd56eaa86ef8e9720020022ecb6b8"} Dec 01 06:46:31 crc kubenswrapper[4632]: I1201 06:46:31.384553 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7f8b7977ff-gdz2t" Dec 01 06:46:31 crc kubenswrapper[4632]: I1201 06:46:31.390293 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7f8b7977ff-gdz2t" Dec 01 06:46:31 crc kubenswrapper[4632]: I1201 06:46:31.392820 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-58b6bc6fbf-nk97z" Dec 01 06:46:31 crc kubenswrapper[4632]: I1201 06:46:31.395753 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-58b6bc6fbf-nk97z" podStartSLOduration=3.395738115 podStartE2EDuration="3.395738115s" podCreationTimestamp="2025-12-01 06:46:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:46:31.392678751 +0000 UTC m=+200.957691724" watchObservedRunningTime="2025-12-01 06:46:31.395738115 +0000 UTC m=+200.960751089" Dec 01 06:46:31 crc kubenswrapper[4632]: I1201 06:46:31.406335 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7f8b7977ff-gdz2t" podStartSLOduration=3.406321093 podStartE2EDuration="3.406321093s" podCreationTimestamp="2025-12-01 06:46:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:46:31.404974731 +0000 UTC m=+200.969987705" 
watchObservedRunningTime="2025-12-01 06:46:31.406321093 +0000 UTC m=+200.971334067" Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.439383 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" podUID="3681c29d-32b7-4037-bd1c-18c2733173bc" containerName="oauth-openshift" containerID="cri-o://66eb0ca573028c0b3a692898f11082092d4d7790a4f79ed0525ce28e74522731" gracePeriod=15 Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.878674 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.970396 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kb846\" (UniqueName: \"kubernetes.io/projected/3681c29d-32b7-4037-bd1c-18c2733173bc-kube-api-access-kb846\") pod \"3681c29d-32b7-4037-bd1c-18c2733173bc\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.970453 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-user-idp-0-file-data\") pod \"3681c29d-32b7-4037-bd1c-18c2733173bc\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.970503 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-trusted-ca-bundle\") pod \"3681c29d-32b7-4037-bd1c-18c2733173bc\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.970530 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-user-template-login\") pod \"3681c29d-32b7-4037-bd1c-18c2733173bc\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.970545 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3681c29d-32b7-4037-bd1c-18c2733173bc-audit-policies\") pod \"3681c29d-32b7-4037-bd1c-18c2733173bc\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.970567 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-user-template-provider-selection\") pod \"3681c29d-32b7-4037-bd1c-18c2733173bc\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.970612 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3681c29d-32b7-4037-bd1c-18c2733173bc-audit-dir\") pod \"3681c29d-32b7-4037-bd1c-18c2733173bc\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.970636 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-ocp-branding-template\") pod \"3681c29d-32b7-4037-bd1c-18c2733173bc\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.970677 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-user-template-error\") pod \"3681c29d-32b7-4037-bd1c-18c2733173bc\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.970710 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-session\") pod \"3681c29d-32b7-4037-bd1c-18c2733173bc\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.970729 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-serving-cert\") pod \"3681c29d-32b7-4037-bd1c-18c2733173bc\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.970745 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-router-certs\") pod \"3681c29d-32b7-4037-bd1c-18c2733173bc\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.970762 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-cliconfig\") pod \"3681c29d-32b7-4037-bd1c-18c2733173bc\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.970789 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-service-ca\") pod \"3681c29d-32b7-4037-bd1c-18c2733173bc\" (UID: \"3681c29d-32b7-4037-bd1c-18c2733173bc\") " Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.971531 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "3681c29d-32b7-4037-bd1c-18c2733173bc" (UID: "3681c29d-32b7-4037-bd1c-18c2733173bc"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.972107 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "3681c29d-32b7-4037-bd1c-18c2733173bc" (UID: "3681c29d-32b7-4037-bd1c-18c2733173bc"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.972148 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "3681c29d-32b7-4037-bd1c-18c2733173bc" (UID: "3681c29d-32b7-4037-bd1c-18c2733173bc"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.972180 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3681c29d-32b7-4037-bd1c-18c2733173bc-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "3681c29d-32b7-4037-bd1c-18c2733173bc" (UID: "3681c29d-32b7-4037-bd1c-18c2733173bc"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.972371 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3681c29d-32b7-4037-bd1c-18c2733173bc-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "3681c29d-32b7-4037-bd1c-18c2733173bc" (UID: "3681c29d-32b7-4037-bd1c-18c2733173bc"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.976016 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "3681c29d-32b7-4037-bd1c-18c2733173bc" (UID: "3681c29d-32b7-4037-bd1c-18c2733173bc"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.976176 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3681c29d-32b7-4037-bd1c-18c2733173bc-kube-api-access-kb846" (OuterVolumeSpecName: "kube-api-access-kb846") pod "3681c29d-32b7-4037-bd1c-18c2733173bc" (UID: "3681c29d-32b7-4037-bd1c-18c2733173bc"). InnerVolumeSpecName "kube-api-access-kb846". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.976542 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "3681c29d-32b7-4037-bd1c-18c2733173bc" (UID: "3681c29d-32b7-4037-bd1c-18c2733173bc"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.976949 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "3681c29d-32b7-4037-bd1c-18c2733173bc" (UID: "3681c29d-32b7-4037-bd1c-18c2733173bc"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.977334 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "3681c29d-32b7-4037-bd1c-18c2733173bc" (UID: "3681c29d-32b7-4037-bd1c-18c2733173bc"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.977686 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "3681c29d-32b7-4037-bd1c-18c2733173bc" (UID: "3681c29d-32b7-4037-bd1c-18c2733173bc"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.977786 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "3681c29d-32b7-4037-bd1c-18c2733173bc" (UID: "3681c29d-32b7-4037-bd1c-18c2733173bc"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.978049 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "3681c29d-32b7-4037-bd1c-18c2733173bc" (UID: "3681c29d-32b7-4037-bd1c-18c2733173bc"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:46:40 crc kubenswrapper[4632]: I1201 06:46:40.978393 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "3681c29d-32b7-4037-bd1c-18c2733173bc" (UID: "3681c29d-32b7-4037-bd1c-18c2733173bc"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.041780 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-8488df84f9-fw8hk"] Dec 01 06:46:41 crc kubenswrapper[4632]: E1201 06:46:41.042025 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3681c29d-32b7-4037-bd1c-18c2733173bc" containerName="oauth-openshift" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.042042 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="3681c29d-32b7-4037-bd1c-18c2733173bc" containerName="oauth-openshift" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.042144 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="3681c29d-32b7-4037-bd1c-18c2733173bc" containerName="oauth-openshift" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.042577 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.052218 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-8488df84f9-fw8hk"] Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.071774 4632 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.071804 4632 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.071964 4632 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.071991 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kb846\" (UniqueName: \"kubernetes.io/projected/3681c29d-32b7-4037-bd1c-18c2733173bc-kube-api-access-kb846\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.072002 4632 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.072013 4632 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.072023 4632 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/3681c29d-32b7-4037-bd1c-18c2733173bc-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.072034 4632 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.072044 4632 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/3681c29d-32b7-4037-bd1c-18c2733173bc-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.072052 4632 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.072062 4632 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:41 crc 
kubenswrapper[4632]: I1201 06:46:41.072074 4632 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.072083 4632 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.072092 4632 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/3681c29d-32b7-4037-bd1c-18c2733173bc-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.173511 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.173560 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sl2dx\" (UniqueName: \"kubernetes.io/projected/42e03907-3514-4769-9077-ea773a01194b-kube-api-access-sl2dx\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.173595 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-user-template-login\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.173616 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.173650 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-system-session\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.173670 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-system-service-ca\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: 
\"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.174023 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-system-serving-cert\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.174129 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.174166 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-user-template-error\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.174205 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.174226 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-system-router-certs\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.174285 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/42e03907-3514-4769-9077-ea773a01194b-audit-dir\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.174385 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/42e03907-3514-4769-9077-ea773a01194b-audit-policies\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.174437 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.276121 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-system-serving-cert\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.276175 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.276224 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-user-template-error\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.276253 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.276868 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-system-router-certs\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.276906 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/42e03907-3514-4769-9077-ea773a01194b-audit-dir\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.276994 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/42e03907-3514-4769-9077-ea773a01194b-audit-policies\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.277053 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.277090 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.277122 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sl2dx\" (UniqueName: \"kubernetes.io/projected/42e03907-3514-4769-9077-ea773a01194b-kube-api-access-sl2dx\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.277172 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-user-template-login\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.277201 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.277247 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-system-session\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.277266 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-system-service-ca\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.277504 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/42e03907-3514-4769-9077-ea773a01194b-audit-dir\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.277586 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.277939 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-system-service-ca\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.278103 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/42e03907-3514-4769-9077-ea773a01194b-audit-policies\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.279282 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.279867 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-system-serving-cert\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.279882 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-user-template-error\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.279947 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.280059 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-user-template-login\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.280611 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.280771 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-system-session\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.280940 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-system-router-certs\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.282101 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/42e03907-3514-4769-9077-ea773a01194b-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.291656 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sl2dx\" (UniqueName: \"kubernetes.io/projected/42e03907-3514-4769-9077-ea773a01194b-kube-api-access-sl2dx\") pod \"oauth-openshift-8488df84f9-fw8hk\" (UID: \"42e03907-3514-4769-9077-ea773a01194b\") " pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.368175 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.459124 4632 generic.go:334] "Generic (PLEG): container finished" podID="3681c29d-32b7-4037-bd1c-18c2733173bc" containerID="66eb0ca573028c0b3a692898f11082092d4d7790a4f79ed0525ce28e74522731" exitCode=0 Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.459218 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" event={"ID":"3681c29d-32b7-4037-bd1c-18c2733173bc","Type":"ContainerDied","Data":"66eb0ca573028c0b3a692898f11082092d4d7790a4f79ed0525ce28e74522731"} Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.459229 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.459322 4632 scope.go:117] "RemoveContainer" containerID="66eb0ca573028c0b3a692898f11082092d4d7790a4f79ed0525ce28e74522731" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.459308 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-lw8pk" event={"ID":"3681c29d-32b7-4037-bd1c-18c2733173bc","Type":"ContainerDied","Data":"fa79ce6097cd508c08b59552ee6cfbb6752fe6aee34d2b367f655107bd13e75e"} Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.488100 4632 scope.go:117] "RemoveContainer" containerID="66eb0ca573028c0b3a692898f11082092d4d7790a4f79ed0525ce28e74522731" Dec 01 06:46:41 crc kubenswrapper[4632]: E1201 06:46:41.488623 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66eb0ca573028c0b3a692898f11082092d4d7790a4f79ed0525ce28e74522731\": container with ID starting with 66eb0ca573028c0b3a692898f11082092d4d7790a4f79ed0525ce28e74522731 not found: ID does not exist" containerID="66eb0ca573028c0b3a692898f11082092d4d7790a4f79ed0525ce28e74522731" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.488664 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66eb0ca573028c0b3a692898f11082092d4d7790a4f79ed0525ce28e74522731"} err="failed to get container status \"66eb0ca573028c0b3a692898f11082092d4d7790a4f79ed0525ce28e74522731\": rpc error: code = NotFound desc = could not find container \"66eb0ca573028c0b3a692898f11082092d4d7790a4f79ed0525ce28e74522731\": container with ID starting with 66eb0ca573028c0b3a692898f11082092d4d7790a4f79ed0525ce28e74522731 not found: ID does not exist" Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.496085 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lw8pk"] Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.498115 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lw8pk"] Dec 01 06:46:41 crc kubenswrapper[4632]: I1201 06:46:41.736467 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-8488df84f9-fw8hk"] Dec 01 06:46:42 crc kubenswrapper[4632]: I1201 06:46:42.468130 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" event={"ID":"42e03907-3514-4769-9077-ea773a01194b","Type":"ContainerStarted","Data":"e763ad12472d7434240af8c5e7a6cfe2b547349a385501a38c3877a3323bd7c6"} Dec 01 06:46:42 crc kubenswrapper[4632]: I1201 06:46:42.468697 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:42 crc kubenswrapper[4632]: I1201 06:46:42.468715 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" event={"ID":"42e03907-3514-4769-9077-ea773a01194b","Type":"ContainerStarted","Data":"d7224598cbc5bd343877309dcfa6834ab68a04db3d3545e1fbc4688b60aa92b6"} Dec 01 06:46:42 crc kubenswrapper[4632]: I1201 06:46:42.473784 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" Dec 01 06:46:42 crc kubenswrapper[4632]: I1201 06:46:42.486423 4632 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openshift-authentication/oauth-openshift-8488df84f9-fw8hk" podStartSLOduration=27.486398958 podStartE2EDuration="27.486398958s" podCreationTimestamp="2025-12-01 06:46:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:46:42.484152507 +0000 UTC m=+212.049165481" watchObservedRunningTime="2025-12-01 06:46:42.486398958 +0000 UTC m=+212.051411932" Dec 01 06:46:42 crc kubenswrapper[4632]: I1201 06:46:42.755759 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3681c29d-32b7-4037-bd1c-18c2733173bc" path="/var/lib/kubelet/pods/3681c29d-32b7-4037-bd1c-18c2733173bc/volumes" Dec 01 06:46:49 crc kubenswrapper[4632]: I1201 06:46:49.498006 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 06:46:49 crc kubenswrapper[4632]: I1201 06:46:49.498616 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 06:46:49 crc kubenswrapper[4632]: I1201 06:46:49.498670 4632 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" Dec 01 06:46:49 crc kubenswrapper[4632]: I1201 06:46:49.500111 4632 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9"} pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 06:46:49 crc kubenswrapper[4632]: I1201 06:46:49.500176 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" containerID="cri-o://1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9" gracePeriod=600 Dec 01 06:46:50 crc kubenswrapper[4632]: I1201 06:46:50.518988 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h6xj5"] Dec 01 06:46:50 crc kubenswrapper[4632]: I1201 06:46:50.520368 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-h6xj5" podUID="9b173a24-6c77-40ce-8d2e-daa317c2972c" containerName="registry-server" containerID="cri-o://3e2be8a76ca084a670ff9c557226d3f6aa5b5556b64d5a1598fe55bbe2f5b7b7" gracePeriod=30 Dec 01 06:46:50 crc kubenswrapper[4632]: I1201 06:46:50.525152 4632 generic.go:334] "Generic (PLEG): container finished" podID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerID="1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9" exitCode=0 Dec 01 06:46:50 crc kubenswrapper[4632]: I1201 06:46:50.525201 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" 
event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerDied","Data":"1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9"} Dec 01 06:46:50 crc kubenswrapper[4632]: I1201 06:46:50.525231 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerStarted","Data":"b2e0f53c88b4c1d159446108cc543bcd91eef18bfb8a923bef5d472620c42a5a"} Dec 01 06:46:50 crc kubenswrapper[4632]: I1201 06:46:50.538747 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5w5vx"] Dec 01 06:46:50 crc kubenswrapper[4632]: I1201 06:46:50.538962 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5w5vx" podUID="84310075-a3e6-46ff-906c-372ee393d197" containerName="registry-server" containerID="cri-o://e249a012deb26804b986d47d639a3d07d9605aaeec3d0b334dac4ba13884dafe" gracePeriod=30 Dec 01 06:46:50 crc kubenswrapper[4632]: I1201 06:46:50.548008 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6vgtg"] Dec 01 06:46:50 crc kubenswrapper[4632]: I1201 06:46:50.548237 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-6vgtg" podUID="3c63f605-5c81-44a5-b1f5-448b6f87c7a4" containerName="marketplace-operator" containerID="cri-o://db06b44e6429880ad21d64e9fa7a727c048857a678ff2c2d87ab6c3c3750600e" gracePeriod=30 Dec 01 06:46:50 crc kubenswrapper[4632]: I1201 06:46:50.553410 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-crm4g"] Dec 01 06:46:50 crc kubenswrapper[4632]: I1201 06:46:50.554180 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-crm4g" Dec 01 06:46:50 crc kubenswrapper[4632]: I1201 06:46:50.562029 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lcqc9"] Dec 01 06:46:50 crc kubenswrapper[4632]: I1201 06:46:50.562235 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-lcqc9" podUID="fcc4ee60-b4e0-4d3f-8e08-76be1749a745" containerName="registry-server" containerID="cri-o://658547aa283e0649b842db2722b60cfa4e9474f7e79630c6f996b28ea2c394d5" gracePeriod=30 Dec 01 06:46:50 crc kubenswrapper[4632]: I1201 06:46:50.565081 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-n24r7"] Dec 01 06:46:50 crc kubenswrapper[4632]: I1201 06:46:50.565375 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-n24r7" podUID="56a27481-d126-4d90-8082-75063f21c2ac" containerName="registry-server" containerID="cri-o://6f1e6fb0e6acd9eb8c3484d67d1e8586bdd1a93dda406114151376afe59bc131" gracePeriod=30 Dec 01 06:46:50 crc kubenswrapper[4632]: I1201 06:46:50.573551 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-crm4g"] Dec 01 06:46:50 crc kubenswrapper[4632]: I1201 06:46:50.712660 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vbn9\" (UniqueName: \"kubernetes.io/projected/5afb9da5-167e-47cf-80fe-e9365ec939fd-kube-api-access-7vbn9\") pod \"marketplace-operator-79b997595-crm4g\" (UID: \"5afb9da5-167e-47cf-80fe-e9365ec939fd\") " pod="openshift-marketplace/marketplace-operator-79b997595-crm4g" Dec 01 06:46:50 crc kubenswrapper[4632]: I1201 06:46:50.712956 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5afb9da5-167e-47cf-80fe-e9365ec939fd-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-crm4g\" (UID: \"5afb9da5-167e-47cf-80fe-e9365ec939fd\") " pod="openshift-marketplace/marketplace-operator-79b997595-crm4g" Dec 01 06:46:50 crc kubenswrapper[4632]: I1201 06:46:50.713121 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5afb9da5-167e-47cf-80fe-e9365ec939fd-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-crm4g\" (UID: \"5afb9da5-167e-47cf-80fe-e9365ec939fd\") " pod="openshift-marketplace/marketplace-operator-79b997595-crm4g" Dec 01 06:46:50 crc kubenswrapper[4632]: I1201 06:46:50.816766 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5afb9da5-167e-47cf-80fe-e9365ec939fd-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-crm4g\" (UID: \"5afb9da5-167e-47cf-80fe-e9365ec939fd\") " pod="openshift-marketplace/marketplace-operator-79b997595-crm4g" Dec 01 06:46:50 crc kubenswrapper[4632]: I1201 06:46:50.816973 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vbn9\" (UniqueName: \"kubernetes.io/projected/5afb9da5-167e-47cf-80fe-e9365ec939fd-kube-api-access-7vbn9\") pod \"marketplace-operator-79b997595-crm4g\" (UID: \"5afb9da5-167e-47cf-80fe-e9365ec939fd\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-crm4g" Dec 01 06:46:50 crc kubenswrapper[4632]: I1201 06:46:50.817096 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5afb9da5-167e-47cf-80fe-e9365ec939fd-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-crm4g\" (UID: \"5afb9da5-167e-47cf-80fe-e9365ec939fd\") " pod="openshift-marketplace/marketplace-operator-79b997595-crm4g" Dec 01 06:46:50 crc kubenswrapper[4632]: I1201 06:46:50.819243 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5afb9da5-167e-47cf-80fe-e9365ec939fd-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-crm4g\" (UID: \"5afb9da5-167e-47cf-80fe-e9365ec939fd\") " pod="openshift-marketplace/marketplace-operator-79b997595-crm4g" Dec 01 06:46:50 crc kubenswrapper[4632]: I1201 06:46:50.828977 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5afb9da5-167e-47cf-80fe-e9365ec939fd-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-crm4g\" (UID: \"5afb9da5-167e-47cf-80fe-e9365ec939fd\") " pod="openshift-marketplace/marketplace-operator-79b997595-crm4g" Dec 01 06:46:50 crc kubenswrapper[4632]: I1201 06:46:50.833571 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vbn9\" (UniqueName: \"kubernetes.io/projected/5afb9da5-167e-47cf-80fe-e9365ec939fd-kube-api-access-7vbn9\") pod \"marketplace-operator-79b997595-crm4g\" (UID: \"5afb9da5-167e-47cf-80fe-e9365ec939fd\") " pod="openshift-marketplace/marketplace-operator-79b997595-crm4g" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.001016 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-crm4g" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.054602 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h6xj5" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.212630 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lcqc9" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.224171 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b173a24-6c77-40ce-8d2e-daa317c2972c-catalog-content\") pod \"9b173a24-6c77-40ce-8d2e-daa317c2972c\" (UID: \"9b173a24-6c77-40ce-8d2e-daa317c2972c\") " Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.224215 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b173a24-6c77-40ce-8d2e-daa317c2972c-utilities\") pod \"9b173a24-6c77-40ce-8d2e-daa317c2972c\" (UID: \"9b173a24-6c77-40ce-8d2e-daa317c2972c\") " Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.224248 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5ppkv\" (UniqueName: \"kubernetes.io/projected/9b173a24-6c77-40ce-8d2e-daa317c2972c-kube-api-access-5ppkv\") pod \"9b173a24-6c77-40ce-8d2e-daa317c2972c\" (UID: \"9b173a24-6c77-40ce-8d2e-daa317c2972c\") " Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.225740 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b173a24-6c77-40ce-8d2e-daa317c2972c-utilities" (OuterVolumeSpecName: "utilities") pod "9b173a24-6c77-40ce-8d2e-daa317c2972c" (UID: "9b173a24-6c77-40ce-8d2e-daa317c2972c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.228937 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b173a24-6c77-40ce-8d2e-daa317c2972c-kube-api-access-5ppkv" (OuterVolumeSpecName: "kube-api-access-5ppkv") pod "9b173a24-6c77-40ce-8d2e-daa317c2972c" (UID: "9b173a24-6c77-40ce-8d2e-daa317c2972c"). InnerVolumeSpecName "kube-api-access-5ppkv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.254768 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6vgtg" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.264581 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5w5vx" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.265785 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n24r7" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.276854 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b173a24-6c77-40ce-8d2e-daa317c2972c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9b173a24-6c77-40ce-8d2e-daa317c2972c" (UID: "9b173a24-6c77-40ce-8d2e-daa317c2972c"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.325909 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcc4ee60-b4e0-4d3f-8e08-76be1749a745-utilities\") pod \"fcc4ee60-b4e0-4d3f-8e08-76be1749a745\" (UID: \"fcc4ee60-b4e0-4d3f-8e08-76be1749a745\") " Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.326034 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-snq7r\" (UniqueName: \"kubernetes.io/projected/fcc4ee60-b4e0-4d3f-8e08-76be1749a745-kube-api-access-snq7r\") pod \"fcc4ee60-b4e0-4d3f-8e08-76be1749a745\" (UID: \"fcc4ee60-b4e0-4d3f-8e08-76be1749a745\") " Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.326107 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcc4ee60-b4e0-4d3f-8e08-76be1749a745-catalog-content\") pod \"fcc4ee60-b4e0-4d3f-8e08-76be1749a745\" (UID: \"fcc4ee60-b4e0-4d3f-8e08-76be1749a745\") " Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.326427 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9b173a24-6c77-40ce-8d2e-daa317c2972c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.326451 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9b173a24-6c77-40ce-8d2e-daa317c2972c-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.326461 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5ppkv\" (UniqueName: \"kubernetes.io/projected/9b173a24-6c77-40ce-8d2e-daa317c2972c-kube-api-access-5ppkv\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.326782 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fcc4ee60-b4e0-4d3f-8e08-76be1749a745-utilities" (OuterVolumeSpecName: "utilities") pod "fcc4ee60-b4e0-4d3f-8e08-76be1749a745" (UID: "fcc4ee60-b4e0-4d3f-8e08-76be1749a745"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.330221 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcc4ee60-b4e0-4d3f-8e08-76be1749a745-kube-api-access-snq7r" (OuterVolumeSpecName: "kube-api-access-snq7r") pod "fcc4ee60-b4e0-4d3f-8e08-76be1749a745" (UID: "fcc4ee60-b4e0-4d3f-8e08-76be1749a745"). InnerVolumeSpecName "kube-api-access-snq7r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.341062 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fcc4ee60-b4e0-4d3f-8e08-76be1749a745-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fcc4ee60-b4e0-4d3f-8e08-76be1749a745" (UID: "fcc4ee60-b4e0-4d3f-8e08-76be1749a745"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.427875 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pm7n7\" (UniqueName: \"kubernetes.io/projected/84310075-a3e6-46ff-906c-372ee393d197-kube-api-access-pm7n7\") pod \"84310075-a3e6-46ff-906c-372ee393d197\" (UID: \"84310075-a3e6-46ff-906c-372ee393d197\") " Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.427966 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56a27481-d126-4d90-8082-75063f21c2ac-catalog-content\") pod \"56a27481-d126-4d90-8082-75063f21c2ac\" (UID: \"56a27481-d126-4d90-8082-75063f21c2ac\") " Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.428024 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84310075-a3e6-46ff-906c-372ee393d197-utilities\") pod \"84310075-a3e6-46ff-906c-372ee393d197\" (UID: \"84310075-a3e6-46ff-906c-372ee393d197\") " Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.428065 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3c63f605-5c81-44a5-b1f5-448b6f87c7a4-marketplace-trusted-ca\") pod \"3c63f605-5c81-44a5-b1f5-448b6f87c7a4\" (UID: \"3c63f605-5c81-44a5-b1f5-448b6f87c7a4\") " Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.428102 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bk2sv\" (UniqueName: \"kubernetes.io/projected/3c63f605-5c81-44a5-b1f5-448b6f87c7a4-kube-api-access-bk2sv\") pod \"3c63f605-5c81-44a5-b1f5-448b6f87c7a4\" (UID: \"3c63f605-5c81-44a5-b1f5-448b6f87c7a4\") " Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.428119 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56a27481-d126-4d90-8082-75063f21c2ac-utilities\") pod \"56a27481-d126-4d90-8082-75063f21c2ac\" (UID: \"56a27481-d126-4d90-8082-75063f21c2ac\") " Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.428149 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3c63f605-5c81-44a5-b1f5-448b6f87c7a4-marketplace-operator-metrics\") pod \"3c63f605-5c81-44a5-b1f5-448b6f87c7a4\" (UID: \"3c63f605-5c81-44a5-b1f5-448b6f87c7a4\") " Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.428194 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-295l2\" (UniqueName: \"kubernetes.io/projected/56a27481-d126-4d90-8082-75063f21c2ac-kube-api-access-295l2\") pod \"56a27481-d126-4d90-8082-75063f21c2ac\" (UID: \"56a27481-d126-4d90-8082-75063f21c2ac\") " Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.428228 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84310075-a3e6-46ff-906c-372ee393d197-catalog-content\") pod \"84310075-a3e6-46ff-906c-372ee393d197\" (UID: \"84310075-a3e6-46ff-906c-372ee393d197\") " Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.428590 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-snq7r\" (UniqueName: 
\"kubernetes.io/projected/fcc4ee60-b4e0-4d3f-8e08-76be1749a745-kube-api-access-snq7r\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.428611 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fcc4ee60-b4e0-4d3f-8e08-76be1749a745-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.428620 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fcc4ee60-b4e0-4d3f-8e08-76be1749a745-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.428712 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84310075-a3e6-46ff-906c-372ee393d197-utilities" (OuterVolumeSpecName: "utilities") pod "84310075-a3e6-46ff-906c-372ee393d197" (UID: "84310075-a3e6-46ff-906c-372ee393d197"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.429037 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56a27481-d126-4d90-8082-75063f21c2ac-utilities" (OuterVolumeSpecName: "utilities") pod "56a27481-d126-4d90-8082-75063f21c2ac" (UID: "56a27481-d126-4d90-8082-75063f21c2ac"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.429267 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c63f605-5c81-44a5-b1f5-448b6f87c7a4-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "3c63f605-5c81-44a5-b1f5-448b6f87c7a4" (UID: "3c63f605-5c81-44a5-b1f5-448b6f87c7a4"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.432776 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c63f605-5c81-44a5-b1f5-448b6f87c7a4-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "3c63f605-5c81-44a5-b1f5-448b6f87c7a4" (UID: "3c63f605-5c81-44a5-b1f5-448b6f87c7a4"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.432935 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56a27481-d126-4d90-8082-75063f21c2ac-kube-api-access-295l2" (OuterVolumeSpecName: "kube-api-access-295l2") pod "56a27481-d126-4d90-8082-75063f21c2ac" (UID: "56a27481-d126-4d90-8082-75063f21c2ac"). InnerVolumeSpecName "kube-api-access-295l2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.433170 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84310075-a3e6-46ff-906c-372ee393d197-kube-api-access-pm7n7" (OuterVolumeSpecName: "kube-api-access-pm7n7") pod "84310075-a3e6-46ff-906c-372ee393d197" (UID: "84310075-a3e6-46ff-906c-372ee393d197"). InnerVolumeSpecName "kube-api-access-pm7n7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.433186 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c63f605-5c81-44a5-b1f5-448b6f87c7a4-kube-api-access-bk2sv" (OuterVolumeSpecName: "kube-api-access-bk2sv") pod "3c63f605-5c81-44a5-b1f5-448b6f87c7a4" (UID: "3c63f605-5c81-44a5-b1f5-448b6f87c7a4"). InnerVolumeSpecName "kube-api-access-bk2sv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.479972 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84310075-a3e6-46ff-906c-372ee393d197-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "84310075-a3e6-46ff-906c-372ee393d197" (UID: "84310075-a3e6-46ff-906c-372ee393d197"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.488889 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-crm4g"] Dec 01 06:46:51 crc kubenswrapper[4632]: W1201 06:46:51.491460 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5afb9da5_167e_47cf_80fe_e9365ec939fd.slice/crio-b75754067950a834ea9ee7a65eb263fdb2dce763529544d9f73c1891c97aae0e WatchSource:0}: Error finding container b75754067950a834ea9ee7a65eb263fdb2dce763529544d9f73c1891c97aae0e: Status 404 returned error can't find the container with id b75754067950a834ea9ee7a65eb263fdb2dce763529544d9f73c1891c97aae0e Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.528114 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56a27481-d126-4d90-8082-75063f21c2ac-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "56a27481-d126-4d90-8082-75063f21c2ac" (UID: "56a27481-d126-4d90-8082-75063f21c2ac"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.530031 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84310075-a3e6-46ff-906c-372ee393d197-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.530050 4632 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3c63f605-5c81-44a5-b1f5-448b6f87c7a4-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.530062 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/56a27481-d126-4d90-8082-75063f21c2ac-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.530070 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bk2sv\" (UniqueName: \"kubernetes.io/projected/3c63f605-5c81-44a5-b1f5-448b6f87c7a4-kube-api-access-bk2sv\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.530079 4632 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3c63f605-5c81-44a5-b1f5-448b6f87c7a4-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.530088 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-295l2\" (UniqueName: \"kubernetes.io/projected/56a27481-d126-4d90-8082-75063f21c2ac-kube-api-access-295l2\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.530096 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84310075-a3e6-46ff-906c-372ee393d197-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.530106 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pm7n7\" (UniqueName: \"kubernetes.io/projected/84310075-a3e6-46ff-906c-372ee393d197-kube-api-access-pm7n7\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.530114 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/56a27481-d126-4d90-8082-75063f21c2ac-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.543054 4632 generic.go:334] "Generic (PLEG): container finished" podID="fcc4ee60-b4e0-4d3f-8e08-76be1749a745" containerID="658547aa283e0649b842db2722b60cfa4e9474f7e79630c6f996b28ea2c394d5" exitCode=0 Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.543146 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lcqc9" event={"ID":"fcc4ee60-b4e0-4d3f-8e08-76be1749a745","Type":"ContainerDied","Data":"658547aa283e0649b842db2722b60cfa4e9474f7e79630c6f996b28ea2c394d5"} Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.543180 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lcqc9" event={"ID":"fcc4ee60-b4e0-4d3f-8e08-76be1749a745","Type":"ContainerDied","Data":"6141f7d035dc03d84bd497432da908f6098a628fcc929ce08218f1209000dfe7"} Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.543201 4632 scope.go:117] "RemoveContainer" 
containerID="658547aa283e0649b842db2722b60cfa4e9474f7e79630c6f996b28ea2c394d5" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.543406 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lcqc9" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.547803 4632 generic.go:334] "Generic (PLEG): container finished" podID="84310075-a3e6-46ff-906c-372ee393d197" containerID="e249a012deb26804b986d47d639a3d07d9605aaeec3d0b334dac4ba13884dafe" exitCode=0 Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.547847 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5w5vx" event={"ID":"84310075-a3e6-46ff-906c-372ee393d197","Type":"ContainerDied","Data":"e249a012deb26804b986d47d639a3d07d9605aaeec3d0b334dac4ba13884dafe"} Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.547893 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5w5vx" event={"ID":"84310075-a3e6-46ff-906c-372ee393d197","Type":"ContainerDied","Data":"383485e3cea0f76db2f46cc22b1062fcfa9dde924dd7db1c92ab3f1a3767dc0c"} Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.548068 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5w5vx" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.551611 4632 generic.go:334] "Generic (PLEG): container finished" podID="9b173a24-6c77-40ce-8d2e-daa317c2972c" containerID="3e2be8a76ca084a670ff9c557226d3f6aa5b5556b64d5a1598fe55bbe2f5b7b7" exitCode=0 Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.551676 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h6xj5" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.551679 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h6xj5" event={"ID":"9b173a24-6c77-40ce-8d2e-daa317c2972c","Type":"ContainerDied","Data":"3e2be8a76ca084a670ff9c557226d3f6aa5b5556b64d5a1598fe55bbe2f5b7b7"} Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.551746 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h6xj5" event={"ID":"9b173a24-6c77-40ce-8d2e-daa317c2972c","Type":"ContainerDied","Data":"77d6a159ceacc7eabbd4c9ed0b19e14151d8f248ad06a1d15bbfe40505fbf28a"} Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.552571 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-crm4g" event={"ID":"5afb9da5-167e-47cf-80fe-e9365ec939fd","Type":"ContainerStarted","Data":"b75754067950a834ea9ee7a65eb263fdb2dce763529544d9f73c1891c97aae0e"} Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.553877 4632 generic.go:334] "Generic (PLEG): container finished" podID="3c63f605-5c81-44a5-b1f5-448b6f87c7a4" containerID="db06b44e6429880ad21d64e9fa7a727c048857a678ff2c2d87ab6c3c3750600e" exitCode=0 Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.553967 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6vgtg" event={"ID":"3c63f605-5c81-44a5-b1f5-448b6f87c7a4","Type":"ContainerDied","Data":"db06b44e6429880ad21d64e9fa7a727c048857a678ff2c2d87ab6c3c3750600e"} Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.553989 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/marketplace-operator-79b997595-6vgtg" event={"ID":"3c63f605-5c81-44a5-b1f5-448b6f87c7a4","Type":"ContainerDied","Data":"da4c064489155000e508e54ba2c05d0cb685f5640332b3a1fd1de62798443be0"} Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.554070 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6vgtg" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.560127 4632 generic.go:334] "Generic (PLEG): container finished" podID="56a27481-d126-4d90-8082-75063f21c2ac" containerID="6f1e6fb0e6acd9eb8c3484d67d1e8586bdd1a93dda406114151376afe59bc131" exitCode=0 Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.560168 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n24r7" event={"ID":"56a27481-d126-4d90-8082-75063f21c2ac","Type":"ContainerDied","Data":"6f1e6fb0e6acd9eb8c3484d67d1e8586bdd1a93dda406114151376afe59bc131"} Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.560319 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n24r7" event={"ID":"56a27481-d126-4d90-8082-75063f21c2ac","Type":"ContainerDied","Data":"d0b321282cfa94a0ab336f9ea85fa4cafdf52d2551ddb3300a29c875a9a92e58"} Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.560526 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n24r7" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.569332 4632 scope.go:117] "RemoveContainer" containerID="e6282a5c62e50bfe716fe3bca430b9d1e8a3fdf7801113d8b2017808603e1db3" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.578872 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lcqc9"] Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.582873 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-lcqc9"] Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.603467 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h6xj5"] Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.605841 4632 scope.go:117] "RemoveContainer" containerID="13a2676f62c75ed99f7b35b59e5ed3094e8eae667277b26f6fd0ccc8fe3e6e1a" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.608795 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-h6xj5"] Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.621236 4632 scope.go:117] "RemoveContainer" containerID="658547aa283e0649b842db2722b60cfa4e9474f7e79630c6f996b28ea2c394d5" Dec 01 06:46:51 crc kubenswrapper[4632]: E1201 06:46:51.621569 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"658547aa283e0649b842db2722b60cfa4e9474f7e79630c6f996b28ea2c394d5\": container with ID starting with 658547aa283e0649b842db2722b60cfa4e9474f7e79630c6f996b28ea2c394d5 not found: ID does not exist" containerID="658547aa283e0649b842db2722b60cfa4e9474f7e79630c6f996b28ea2c394d5" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.621596 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"658547aa283e0649b842db2722b60cfa4e9474f7e79630c6f996b28ea2c394d5"} err="failed to get container status \"658547aa283e0649b842db2722b60cfa4e9474f7e79630c6f996b28ea2c394d5\": rpc error: 
code = NotFound desc = could not find container \"658547aa283e0649b842db2722b60cfa4e9474f7e79630c6f996b28ea2c394d5\": container with ID starting with 658547aa283e0649b842db2722b60cfa4e9474f7e79630c6f996b28ea2c394d5 not found: ID does not exist" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.621616 4632 scope.go:117] "RemoveContainer" containerID="e6282a5c62e50bfe716fe3bca430b9d1e8a3fdf7801113d8b2017808603e1db3" Dec 01 06:46:51 crc kubenswrapper[4632]: E1201 06:46:51.621912 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6282a5c62e50bfe716fe3bca430b9d1e8a3fdf7801113d8b2017808603e1db3\": container with ID starting with e6282a5c62e50bfe716fe3bca430b9d1e8a3fdf7801113d8b2017808603e1db3 not found: ID does not exist" containerID="e6282a5c62e50bfe716fe3bca430b9d1e8a3fdf7801113d8b2017808603e1db3" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.621941 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6282a5c62e50bfe716fe3bca430b9d1e8a3fdf7801113d8b2017808603e1db3"} err="failed to get container status \"e6282a5c62e50bfe716fe3bca430b9d1e8a3fdf7801113d8b2017808603e1db3\": rpc error: code = NotFound desc = could not find container \"e6282a5c62e50bfe716fe3bca430b9d1e8a3fdf7801113d8b2017808603e1db3\": container with ID starting with e6282a5c62e50bfe716fe3bca430b9d1e8a3fdf7801113d8b2017808603e1db3 not found: ID does not exist" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.621955 4632 scope.go:117] "RemoveContainer" containerID="13a2676f62c75ed99f7b35b59e5ed3094e8eae667277b26f6fd0ccc8fe3e6e1a" Dec 01 06:46:51 crc kubenswrapper[4632]: E1201 06:46:51.622201 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13a2676f62c75ed99f7b35b59e5ed3094e8eae667277b26f6fd0ccc8fe3e6e1a\": container with ID starting with 13a2676f62c75ed99f7b35b59e5ed3094e8eae667277b26f6fd0ccc8fe3e6e1a not found: ID does not exist" containerID="13a2676f62c75ed99f7b35b59e5ed3094e8eae667277b26f6fd0ccc8fe3e6e1a" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.622219 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13a2676f62c75ed99f7b35b59e5ed3094e8eae667277b26f6fd0ccc8fe3e6e1a"} err="failed to get container status \"13a2676f62c75ed99f7b35b59e5ed3094e8eae667277b26f6fd0ccc8fe3e6e1a\": rpc error: code = NotFound desc = could not find container \"13a2676f62c75ed99f7b35b59e5ed3094e8eae667277b26f6fd0ccc8fe3e6e1a\": container with ID starting with 13a2676f62c75ed99f7b35b59e5ed3094e8eae667277b26f6fd0ccc8fe3e6e1a not found: ID does not exist" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.622231 4632 scope.go:117] "RemoveContainer" containerID="e249a012deb26804b986d47d639a3d07d9605aaeec3d0b334dac4ba13884dafe" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.624507 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6vgtg"] Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.638528 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6vgtg"] Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.640508 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-n24r7"] Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.642835 4632 scope.go:117] "RemoveContainer" 
containerID="c7e34f6d46f7de2503ac3b80d0eb0b3cfb24174f91fa5352efaec7ed9881b4e5" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.643848 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-n24r7"] Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.648377 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5w5vx"] Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.652649 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5w5vx"] Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.676711 4632 scope.go:117] "RemoveContainer" containerID="5ad3d806f1a1a93dd6667688ef9d436454f61d1e3a78f48a7c4ba444398931c6" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.687285 4632 scope.go:117] "RemoveContainer" containerID="e249a012deb26804b986d47d639a3d07d9605aaeec3d0b334dac4ba13884dafe" Dec 01 06:46:51 crc kubenswrapper[4632]: E1201 06:46:51.687835 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e249a012deb26804b986d47d639a3d07d9605aaeec3d0b334dac4ba13884dafe\": container with ID starting with e249a012deb26804b986d47d639a3d07d9605aaeec3d0b334dac4ba13884dafe not found: ID does not exist" containerID="e249a012deb26804b986d47d639a3d07d9605aaeec3d0b334dac4ba13884dafe" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.687870 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e249a012deb26804b986d47d639a3d07d9605aaeec3d0b334dac4ba13884dafe"} err="failed to get container status \"e249a012deb26804b986d47d639a3d07d9605aaeec3d0b334dac4ba13884dafe\": rpc error: code = NotFound desc = could not find container \"e249a012deb26804b986d47d639a3d07d9605aaeec3d0b334dac4ba13884dafe\": container with ID starting with e249a012deb26804b986d47d639a3d07d9605aaeec3d0b334dac4ba13884dafe not found: ID does not exist" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.687895 4632 scope.go:117] "RemoveContainer" containerID="c7e34f6d46f7de2503ac3b80d0eb0b3cfb24174f91fa5352efaec7ed9881b4e5" Dec 01 06:46:51 crc kubenswrapper[4632]: E1201 06:46:51.688242 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7e34f6d46f7de2503ac3b80d0eb0b3cfb24174f91fa5352efaec7ed9881b4e5\": container with ID starting with c7e34f6d46f7de2503ac3b80d0eb0b3cfb24174f91fa5352efaec7ed9881b4e5 not found: ID does not exist" containerID="c7e34f6d46f7de2503ac3b80d0eb0b3cfb24174f91fa5352efaec7ed9881b4e5" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.688266 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7e34f6d46f7de2503ac3b80d0eb0b3cfb24174f91fa5352efaec7ed9881b4e5"} err="failed to get container status \"c7e34f6d46f7de2503ac3b80d0eb0b3cfb24174f91fa5352efaec7ed9881b4e5\": rpc error: code = NotFound desc = could not find container \"c7e34f6d46f7de2503ac3b80d0eb0b3cfb24174f91fa5352efaec7ed9881b4e5\": container with ID starting with c7e34f6d46f7de2503ac3b80d0eb0b3cfb24174f91fa5352efaec7ed9881b4e5 not found: ID does not exist" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.688282 4632 scope.go:117] "RemoveContainer" containerID="5ad3d806f1a1a93dd6667688ef9d436454f61d1e3a78f48a7c4ba444398931c6" Dec 01 06:46:51 crc kubenswrapper[4632]: E1201 06:46:51.688605 4632 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"5ad3d806f1a1a93dd6667688ef9d436454f61d1e3a78f48a7c4ba444398931c6\": container with ID starting with 5ad3d806f1a1a93dd6667688ef9d436454f61d1e3a78f48a7c4ba444398931c6 not found: ID does not exist" containerID="5ad3d806f1a1a93dd6667688ef9d436454f61d1e3a78f48a7c4ba444398931c6" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.688692 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ad3d806f1a1a93dd6667688ef9d436454f61d1e3a78f48a7c4ba444398931c6"} err="failed to get container status \"5ad3d806f1a1a93dd6667688ef9d436454f61d1e3a78f48a7c4ba444398931c6\": rpc error: code = NotFound desc = could not find container \"5ad3d806f1a1a93dd6667688ef9d436454f61d1e3a78f48a7c4ba444398931c6\": container with ID starting with 5ad3d806f1a1a93dd6667688ef9d436454f61d1e3a78f48a7c4ba444398931c6 not found: ID does not exist" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.688769 4632 scope.go:117] "RemoveContainer" containerID="3e2be8a76ca084a670ff9c557226d3f6aa5b5556b64d5a1598fe55bbe2f5b7b7" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.698588 4632 scope.go:117] "RemoveContainer" containerID="54b7d05c1830822d0fe35b40978c5c05ee13021a47bd2151140fd958a10a7209" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.710452 4632 scope.go:117] "RemoveContainer" containerID="5c28dbfbffe7e95cd75374328c3bd2c5f1c4f2eb5b24e04f6e38452026233e45" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.726683 4632 scope.go:117] "RemoveContainer" containerID="3e2be8a76ca084a670ff9c557226d3f6aa5b5556b64d5a1598fe55bbe2f5b7b7" Dec 01 06:46:51 crc kubenswrapper[4632]: E1201 06:46:51.727012 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e2be8a76ca084a670ff9c557226d3f6aa5b5556b64d5a1598fe55bbe2f5b7b7\": container with ID starting with 3e2be8a76ca084a670ff9c557226d3f6aa5b5556b64d5a1598fe55bbe2f5b7b7 not found: ID does not exist" containerID="3e2be8a76ca084a670ff9c557226d3f6aa5b5556b64d5a1598fe55bbe2f5b7b7" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.727115 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e2be8a76ca084a670ff9c557226d3f6aa5b5556b64d5a1598fe55bbe2f5b7b7"} err="failed to get container status \"3e2be8a76ca084a670ff9c557226d3f6aa5b5556b64d5a1598fe55bbe2f5b7b7\": rpc error: code = NotFound desc = could not find container \"3e2be8a76ca084a670ff9c557226d3f6aa5b5556b64d5a1598fe55bbe2f5b7b7\": container with ID starting with 3e2be8a76ca084a670ff9c557226d3f6aa5b5556b64d5a1598fe55bbe2f5b7b7 not found: ID does not exist" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.727196 4632 scope.go:117] "RemoveContainer" containerID="54b7d05c1830822d0fe35b40978c5c05ee13021a47bd2151140fd958a10a7209" Dec 01 06:46:51 crc kubenswrapper[4632]: E1201 06:46:51.727525 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54b7d05c1830822d0fe35b40978c5c05ee13021a47bd2151140fd958a10a7209\": container with ID starting with 54b7d05c1830822d0fe35b40978c5c05ee13021a47bd2151140fd958a10a7209 not found: ID does not exist" containerID="54b7d05c1830822d0fe35b40978c5c05ee13021a47bd2151140fd958a10a7209" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.727553 4632 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"54b7d05c1830822d0fe35b40978c5c05ee13021a47bd2151140fd958a10a7209"} err="failed to get container status \"54b7d05c1830822d0fe35b40978c5c05ee13021a47bd2151140fd958a10a7209\": rpc error: code = NotFound desc = could not find container \"54b7d05c1830822d0fe35b40978c5c05ee13021a47bd2151140fd958a10a7209\": container with ID starting with 54b7d05c1830822d0fe35b40978c5c05ee13021a47bd2151140fd958a10a7209 not found: ID does not exist" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.727570 4632 scope.go:117] "RemoveContainer" containerID="5c28dbfbffe7e95cd75374328c3bd2c5f1c4f2eb5b24e04f6e38452026233e45" Dec 01 06:46:51 crc kubenswrapper[4632]: E1201 06:46:51.727870 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c28dbfbffe7e95cd75374328c3bd2c5f1c4f2eb5b24e04f6e38452026233e45\": container with ID starting with 5c28dbfbffe7e95cd75374328c3bd2c5f1c4f2eb5b24e04f6e38452026233e45 not found: ID does not exist" containerID="5c28dbfbffe7e95cd75374328c3bd2c5f1c4f2eb5b24e04f6e38452026233e45" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.727888 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c28dbfbffe7e95cd75374328c3bd2c5f1c4f2eb5b24e04f6e38452026233e45"} err="failed to get container status \"5c28dbfbffe7e95cd75374328c3bd2c5f1c4f2eb5b24e04f6e38452026233e45\": rpc error: code = NotFound desc = could not find container \"5c28dbfbffe7e95cd75374328c3bd2c5f1c4f2eb5b24e04f6e38452026233e45\": container with ID starting with 5c28dbfbffe7e95cd75374328c3bd2c5f1c4f2eb5b24e04f6e38452026233e45 not found: ID does not exist" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.727904 4632 scope.go:117] "RemoveContainer" containerID="db06b44e6429880ad21d64e9fa7a727c048857a678ff2c2d87ab6c3c3750600e" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.740106 4632 scope.go:117] "RemoveContainer" containerID="db06b44e6429880ad21d64e9fa7a727c048857a678ff2c2d87ab6c3c3750600e" Dec 01 06:46:51 crc kubenswrapper[4632]: E1201 06:46:51.740440 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db06b44e6429880ad21d64e9fa7a727c048857a678ff2c2d87ab6c3c3750600e\": container with ID starting with db06b44e6429880ad21d64e9fa7a727c048857a678ff2c2d87ab6c3c3750600e not found: ID does not exist" containerID="db06b44e6429880ad21d64e9fa7a727c048857a678ff2c2d87ab6c3c3750600e" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.740796 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db06b44e6429880ad21d64e9fa7a727c048857a678ff2c2d87ab6c3c3750600e"} err="failed to get container status \"db06b44e6429880ad21d64e9fa7a727c048857a678ff2c2d87ab6c3c3750600e\": rpc error: code = NotFound desc = could not find container \"db06b44e6429880ad21d64e9fa7a727c048857a678ff2c2d87ab6c3c3750600e\": container with ID starting with db06b44e6429880ad21d64e9fa7a727c048857a678ff2c2d87ab6c3c3750600e not found: ID does not exist" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.740902 4632 scope.go:117] "RemoveContainer" containerID="6f1e6fb0e6acd9eb8c3484d67d1e8586bdd1a93dda406114151376afe59bc131" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.751734 4632 scope.go:117] "RemoveContainer" containerID="f2eaa91c22e8af9c351ecb29ae618e55e7b1dcbcbb9ebf7abc47b83ef5e93761" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.764369 4632 
scope.go:117] "RemoveContainer" containerID="cbfa4473aeb3bd7ba699d22724a4edba773199ba66572710099dff7d03e14797" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.773003 4632 scope.go:117] "RemoveContainer" containerID="6f1e6fb0e6acd9eb8c3484d67d1e8586bdd1a93dda406114151376afe59bc131" Dec 01 06:46:51 crc kubenswrapper[4632]: E1201 06:46:51.773274 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f1e6fb0e6acd9eb8c3484d67d1e8586bdd1a93dda406114151376afe59bc131\": container with ID starting with 6f1e6fb0e6acd9eb8c3484d67d1e8586bdd1a93dda406114151376afe59bc131 not found: ID does not exist" containerID="6f1e6fb0e6acd9eb8c3484d67d1e8586bdd1a93dda406114151376afe59bc131" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.773295 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f1e6fb0e6acd9eb8c3484d67d1e8586bdd1a93dda406114151376afe59bc131"} err="failed to get container status \"6f1e6fb0e6acd9eb8c3484d67d1e8586bdd1a93dda406114151376afe59bc131\": rpc error: code = NotFound desc = could not find container \"6f1e6fb0e6acd9eb8c3484d67d1e8586bdd1a93dda406114151376afe59bc131\": container with ID starting with 6f1e6fb0e6acd9eb8c3484d67d1e8586bdd1a93dda406114151376afe59bc131 not found: ID does not exist" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.773313 4632 scope.go:117] "RemoveContainer" containerID="f2eaa91c22e8af9c351ecb29ae618e55e7b1dcbcbb9ebf7abc47b83ef5e93761" Dec 01 06:46:51 crc kubenswrapper[4632]: E1201 06:46:51.773565 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2eaa91c22e8af9c351ecb29ae618e55e7b1dcbcbb9ebf7abc47b83ef5e93761\": container with ID starting with f2eaa91c22e8af9c351ecb29ae618e55e7b1dcbcbb9ebf7abc47b83ef5e93761 not found: ID does not exist" containerID="f2eaa91c22e8af9c351ecb29ae618e55e7b1dcbcbb9ebf7abc47b83ef5e93761" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.773582 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2eaa91c22e8af9c351ecb29ae618e55e7b1dcbcbb9ebf7abc47b83ef5e93761"} err="failed to get container status \"f2eaa91c22e8af9c351ecb29ae618e55e7b1dcbcbb9ebf7abc47b83ef5e93761\": rpc error: code = NotFound desc = could not find container \"f2eaa91c22e8af9c351ecb29ae618e55e7b1dcbcbb9ebf7abc47b83ef5e93761\": container with ID starting with f2eaa91c22e8af9c351ecb29ae618e55e7b1dcbcbb9ebf7abc47b83ef5e93761 not found: ID does not exist" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.773597 4632 scope.go:117] "RemoveContainer" containerID="cbfa4473aeb3bd7ba699d22724a4edba773199ba66572710099dff7d03e14797" Dec 01 06:46:51 crc kubenswrapper[4632]: E1201 06:46:51.773842 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbfa4473aeb3bd7ba699d22724a4edba773199ba66572710099dff7d03e14797\": container with ID starting with cbfa4473aeb3bd7ba699d22724a4edba773199ba66572710099dff7d03e14797 not found: ID does not exist" containerID="cbfa4473aeb3bd7ba699d22724a4edba773199ba66572710099dff7d03e14797" Dec 01 06:46:51 crc kubenswrapper[4632]: I1201 06:46:51.773856 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbfa4473aeb3bd7ba699d22724a4edba773199ba66572710099dff7d03e14797"} err="failed to get container status 
\"cbfa4473aeb3bd7ba699d22724a4edba773199ba66572710099dff7d03e14797\": rpc error: code = NotFound desc = could not find container \"cbfa4473aeb3bd7ba699d22724a4edba773199ba66572710099dff7d03e14797\": container with ID starting with cbfa4473aeb3bd7ba699d22724a4edba773199ba66572710099dff7d03e14797 not found: ID does not exist" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.569804 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-crm4g" event={"ID":"5afb9da5-167e-47cf-80fe-e9365ec939fd","Type":"ContainerStarted","Data":"e1afc98c137604ee8715bbdc43c8c20a0abbc47c2b09653c8029e3f2b4461b72"} Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.570103 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-crm4g" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.573563 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-crm4g" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.584688 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-crm4g" podStartSLOduration=2.584671363 podStartE2EDuration="2.584671363s" podCreationTimestamp="2025-12-01 06:46:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:46:52.581334934 +0000 UTC m=+222.146347907" watchObservedRunningTime="2025-12-01 06:46:52.584671363 +0000 UTC m=+222.149684335" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.740884 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mwjfp"] Dec 01 06:46:52 crc kubenswrapper[4632]: E1201 06:46:52.741133 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56a27481-d126-4d90-8082-75063f21c2ac" containerName="registry-server" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.741148 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="56a27481-d126-4d90-8082-75063f21c2ac" containerName="registry-server" Dec 01 06:46:52 crc kubenswrapper[4632]: E1201 06:46:52.741158 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcc4ee60-b4e0-4d3f-8e08-76be1749a745" containerName="extract-utilities" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.741164 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcc4ee60-b4e0-4d3f-8e08-76be1749a745" containerName="extract-utilities" Dec 01 06:46:52 crc kubenswrapper[4632]: E1201 06:46:52.741172 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84310075-a3e6-46ff-906c-372ee393d197" containerName="registry-server" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.741178 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="84310075-a3e6-46ff-906c-372ee393d197" containerName="registry-server" Dec 01 06:46:52 crc kubenswrapper[4632]: E1201 06:46:52.741188 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84310075-a3e6-46ff-906c-372ee393d197" containerName="extract-utilities" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.741194 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="84310075-a3e6-46ff-906c-372ee393d197" containerName="extract-utilities" Dec 01 06:46:52 crc kubenswrapper[4632]: E1201 06:46:52.741200 4632 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="9b173a24-6c77-40ce-8d2e-daa317c2972c" containerName="registry-server" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.741206 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b173a24-6c77-40ce-8d2e-daa317c2972c" containerName="registry-server" Dec 01 06:46:52 crc kubenswrapper[4632]: E1201 06:46:52.741212 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84310075-a3e6-46ff-906c-372ee393d197" containerName="extract-content" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.741217 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="84310075-a3e6-46ff-906c-372ee393d197" containerName="extract-content" Dec 01 06:46:52 crc kubenswrapper[4632]: E1201 06:46:52.741228 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c63f605-5c81-44a5-b1f5-448b6f87c7a4" containerName="marketplace-operator" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.741244 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c63f605-5c81-44a5-b1f5-448b6f87c7a4" containerName="marketplace-operator" Dec 01 06:46:52 crc kubenswrapper[4632]: E1201 06:46:52.741251 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b173a24-6c77-40ce-8d2e-daa317c2972c" containerName="extract-content" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.741256 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b173a24-6c77-40ce-8d2e-daa317c2972c" containerName="extract-content" Dec 01 06:46:52 crc kubenswrapper[4632]: E1201 06:46:52.741265 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56a27481-d126-4d90-8082-75063f21c2ac" containerName="extract-utilities" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.741271 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="56a27481-d126-4d90-8082-75063f21c2ac" containerName="extract-utilities" Dec 01 06:46:52 crc kubenswrapper[4632]: E1201 06:46:52.741295 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b173a24-6c77-40ce-8d2e-daa317c2972c" containerName="extract-utilities" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.741301 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b173a24-6c77-40ce-8d2e-daa317c2972c" containerName="extract-utilities" Dec 01 06:46:52 crc kubenswrapper[4632]: E1201 06:46:52.741308 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcc4ee60-b4e0-4d3f-8e08-76be1749a745" containerName="registry-server" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.741313 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcc4ee60-b4e0-4d3f-8e08-76be1749a745" containerName="registry-server" Dec 01 06:46:52 crc kubenswrapper[4632]: E1201 06:46:52.741322 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcc4ee60-b4e0-4d3f-8e08-76be1749a745" containerName="extract-content" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.741327 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcc4ee60-b4e0-4d3f-8e08-76be1749a745" containerName="extract-content" Dec 01 06:46:52 crc kubenswrapper[4632]: E1201 06:46:52.741334 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56a27481-d126-4d90-8082-75063f21c2ac" containerName="extract-content" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.741340 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="56a27481-d126-4d90-8082-75063f21c2ac" containerName="extract-content" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.741460 4632 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="56a27481-d126-4d90-8082-75063f21c2ac" containerName="registry-server" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.741472 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b173a24-6c77-40ce-8d2e-daa317c2972c" containerName="registry-server" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.741482 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="84310075-a3e6-46ff-906c-372ee393d197" containerName="registry-server" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.741491 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c63f605-5c81-44a5-b1f5-448b6f87c7a4" containerName="marketplace-operator" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.741498 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcc4ee60-b4e0-4d3f-8e08-76be1749a745" containerName="registry-server" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.742229 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mwjfp" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.743907 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.746627 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mwjfp"] Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.765092 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c63f605-5c81-44a5-b1f5-448b6f87c7a4" path="/var/lib/kubelet/pods/3c63f605-5c81-44a5-b1f5-448b6f87c7a4/volumes" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.765609 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56a27481-d126-4d90-8082-75063f21c2ac" path="/var/lib/kubelet/pods/56a27481-d126-4d90-8082-75063f21c2ac/volumes" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.766143 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84310075-a3e6-46ff-906c-372ee393d197" path="/var/lib/kubelet/pods/84310075-a3e6-46ff-906c-372ee393d197/volumes" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.767148 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b173a24-6c77-40ce-8d2e-daa317c2972c" path="/var/lib/kubelet/pods/9b173a24-6c77-40ce-8d2e-daa317c2972c/volumes" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.767726 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcc4ee60-b4e0-4d3f-8e08-76be1749a745" path="/var/lib/kubelet/pods/fcc4ee60-b4e0-4d3f-8e08-76be1749a745/volumes" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.847467 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2123ef98-d1db-4b71-8657-51be9c899a23-utilities\") pod \"community-operators-mwjfp\" (UID: \"2123ef98-d1db-4b71-8657-51be9c899a23\") " pod="openshift-marketplace/community-operators-mwjfp" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.847563 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2123ef98-d1db-4b71-8657-51be9c899a23-catalog-content\") pod \"community-operators-mwjfp\" (UID: \"2123ef98-d1db-4b71-8657-51be9c899a23\") " 
pod="openshift-marketplace/community-operators-mwjfp" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.847638 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8pbbn\" (UniqueName: \"kubernetes.io/projected/2123ef98-d1db-4b71-8657-51be9c899a23-kube-api-access-8pbbn\") pod \"community-operators-mwjfp\" (UID: \"2123ef98-d1db-4b71-8657-51be9c899a23\") " pod="openshift-marketplace/community-operators-mwjfp" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.941188 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-l4w7t"] Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.942139 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l4w7t" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.944514 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.949140 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l4w7t"] Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.949303 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2123ef98-d1db-4b71-8657-51be9c899a23-utilities\") pod \"community-operators-mwjfp\" (UID: \"2123ef98-d1db-4b71-8657-51be9c899a23\") " pod="openshift-marketplace/community-operators-mwjfp" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.949367 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2123ef98-d1db-4b71-8657-51be9c899a23-catalog-content\") pod \"community-operators-mwjfp\" (UID: \"2123ef98-d1db-4b71-8657-51be9c899a23\") " pod="openshift-marketplace/community-operators-mwjfp" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.949422 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8pbbn\" (UniqueName: \"kubernetes.io/projected/2123ef98-d1db-4b71-8657-51be9c899a23-kube-api-access-8pbbn\") pod \"community-operators-mwjfp\" (UID: \"2123ef98-d1db-4b71-8657-51be9c899a23\") " pod="openshift-marketplace/community-operators-mwjfp" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.949768 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2123ef98-d1db-4b71-8657-51be9c899a23-utilities\") pod \"community-operators-mwjfp\" (UID: \"2123ef98-d1db-4b71-8657-51be9c899a23\") " pod="openshift-marketplace/community-operators-mwjfp" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.949849 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2123ef98-d1db-4b71-8657-51be9c899a23-catalog-content\") pod \"community-operators-mwjfp\" (UID: \"2123ef98-d1db-4b71-8657-51be9c899a23\") " pod="openshift-marketplace/community-operators-mwjfp" Dec 01 06:46:52 crc kubenswrapper[4632]: I1201 06:46:52.967927 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8pbbn\" (UniqueName: \"kubernetes.io/projected/2123ef98-d1db-4b71-8657-51be9c899a23-kube-api-access-8pbbn\") pod \"community-operators-mwjfp\" (UID: \"2123ef98-d1db-4b71-8657-51be9c899a23\") " 
pod="openshift-marketplace/community-operators-mwjfp" Dec 01 06:46:53 crc kubenswrapper[4632]: I1201 06:46:53.050885 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4r89n\" (UniqueName: \"kubernetes.io/projected/de997f32-b849-4dcd-96e7-de56cfa3ec3d-kube-api-access-4r89n\") pod \"certified-operators-l4w7t\" (UID: \"de997f32-b849-4dcd-96e7-de56cfa3ec3d\") " pod="openshift-marketplace/certified-operators-l4w7t" Dec 01 06:46:53 crc kubenswrapper[4632]: I1201 06:46:53.050962 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de997f32-b849-4dcd-96e7-de56cfa3ec3d-utilities\") pod \"certified-operators-l4w7t\" (UID: \"de997f32-b849-4dcd-96e7-de56cfa3ec3d\") " pod="openshift-marketplace/certified-operators-l4w7t" Dec 01 06:46:53 crc kubenswrapper[4632]: I1201 06:46:53.050990 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de997f32-b849-4dcd-96e7-de56cfa3ec3d-catalog-content\") pod \"certified-operators-l4w7t\" (UID: \"de997f32-b849-4dcd-96e7-de56cfa3ec3d\") " pod="openshift-marketplace/certified-operators-l4w7t" Dec 01 06:46:53 crc kubenswrapper[4632]: I1201 06:46:53.063460 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mwjfp" Dec 01 06:46:53 crc kubenswrapper[4632]: I1201 06:46:53.152332 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4r89n\" (UniqueName: \"kubernetes.io/projected/de997f32-b849-4dcd-96e7-de56cfa3ec3d-kube-api-access-4r89n\") pod \"certified-operators-l4w7t\" (UID: \"de997f32-b849-4dcd-96e7-de56cfa3ec3d\") " pod="openshift-marketplace/certified-operators-l4w7t" Dec 01 06:46:53 crc kubenswrapper[4632]: I1201 06:46:53.152637 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de997f32-b849-4dcd-96e7-de56cfa3ec3d-utilities\") pod \"certified-operators-l4w7t\" (UID: \"de997f32-b849-4dcd-96e7-de56cfa3ec3d\") " pod="openshift-marketplace/certified-operators-l4w7t" Dec 01 06:46:53 crc kubenswrapper[4632]: I1201 06:46:53.152668 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de997f32-b849-4dcd-96e7-de56cfa3ec3d-catalog-content\") pod \"certified-operators-l4w7t\" (UID: \"de997f32-b849-4dcd-96e7-de56cfa3ec3d\") " pod="openshift-marketplace/certified-operators-l4w7t" Dec 01 06:46:53 crc kubenswrapper[4632]: I1201 06:46:53.153106 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de997f32-b849-4dcd-96e7-de56cfa3ec3d-utilities\") pod \"certified-operators-l4w7t\" (UID: \"de997f32-b849-4dcd-96e7-de56cfa3ec3d\") " pod="openshift-marketplace/certified-operators-l4w7t" Dec 01 06:46:53 crc kubenswrapper[4632]: I1201 06:46:53.153566 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de997f32-b849-4dcd-96e7-de56cfa3ec3d-catalog-content\") pod \"certified-operators-l4w7t\" (UID: \"de997f32-b849-4dcd-96e7-de56cfa3ec3d\") " pod="openshift-marketplace/certified-operators-l4w7t" Dec 01 06:46:53 crc kubenswrapper[4632]: I1201 06:46:53.170737 4632 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-4r89n\" (UniqueName: \"kubernetes.io/projected/de997f32-b849-4dcd-96e7-de56cfa3ec3d-kube-api-access-4r89n\") pod \"certified-operators-l4w7t\" (UID: \"de997f32-b849-4dcd-96e7-de56cfa3ec3d\") " pod="openshift-marketplace/certified-operators-l4w7t" Dec 01 06:46:53 crc kubenswrapper[4632]: I1201 06:46:53.282565 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l4w7t" Dec 01 06:46:53 crc kubenswrapper[4632]: I1201 06:46:53.426930 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mwjfp"] Dec 01 06:46:53 crc kubenswrapper[4632]: I1201 06:46:53.582687 4632 generic.go:334] "Generic (PLEG): container finished" podID="2123ef98-d1db-4b71-8657-51be9c899a23" containerID="cf0b4f8171e8648994797d705052906b517f57ca6553e4a87732ad9e5ffd6a37" exitCode=0 Dec 01 06:46:53 crc kubenswrapper[4632]: I1201 06:46:53.582784 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwjfp" event={"ID":"2123ef98-d1db-4b71-8657-51be9c899a23","Type":"ContainerDied","Data":"cf0b4f8171e8648994797d705052906b517f57ca6553e4a87732ad9e5ffd6a37"} Dec 01 06:46:53 crc kubenswrapper[4632]: I1201 06:46:53.582810 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwjfp" event={"ID":"2123ef98-d1db-4b71-8657-51be9c899a23","Type":"ContainerStarted","Data":"60f831ef28817d9df3f7fc1666ac87b47c47529c9e6b0d108df0d37760579c09"} Dec 01 06:46:53 crc kubenswrapper[4632]: I1201 06:46:53.632278 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l4w7t"] Dec 01 06:46:53 crc kubenswrapper[4632]: W1201 06:46:53.637310 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podde997f32_b849_4dcd_96e7_de56cfa3ec3d.slice/crio-f6a0399754471c706f0a2edde0d35fc6c96e14d5d1061bffa5d9cbd8754daba0 WatchSource:0}: Error finding container f6a0399754471c706f0a2edde0d35fc6c96e14d5d1061bffa5d9cbd8754daba0: Status 404 returned error can't find the container with id f6a0399754471c706f0a2edde0d35fc6c96e14d5d1061bffa5d9cbd8754daba0 Dec 01 06:46:54 crc kubenswrapper[4632]: I1201 06:46:54.590293 4632 generic.go:334] "Generic (PLEG): container finished" podID="de997f32-b849-4dcd-96e7-de56cfa3ec3d" containerID="20bae3bbf95193f7ed622ef5a5a9c301046d3de8fa826e348b13584a51d09bc8" exitCode=0 Dec 01 06:46:54 crc kubenswrapper[4632]: I1201 06:46:54.590382 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l4w7t" event={"ID":"de997f32-b849-4dcd-96e7-de56cfa3ec3d","Type":"ContainerDied","Data":"20bae3bbf95193f7ed622ef5a5a9c301046d3de8fa826e348b13584a51d09bc8"} Dec 01 06:46:54 crc kubenswrapper[4632]: I1201 06:46:54.590585 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l4w7t" event={"ID":"de997f32-b849-4dcd-96e7-de56cfa3ec3d","Type":"ContainerStarted","Data":"f6a0399754471c706f0a2edde0d35fc6c96e14d5d1061bffa5d9cbd8754daba0"} Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.139464 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-r52z6"] Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.141073 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r52z6" Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.143822 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.148035 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-r52z6"] Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.281919 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59pnj\" (UniqueName: \"kubernetes.io/projected/1468035d-a3bd-4465-9fee-f27f6f7d4d7e-kube-api-access-59pnj\") pod \"redhat-marketplace-r52z6\" (UID: \"1468035d-a3bd-4465-9fee-f27f6f7d4d7e\") " pod="openshift-marketplace/redhat-marketplace-r52z6" Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.281992 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1468035d-a3bd-4465-9fee-f27f6f7d4d7e-catalog-content\") pod \"redhat-marketplace-r52z6\" (UID: \"1468035d-a3bd-4465-9fee-f27f6f7d4d7e\") " pod="openshift-marketplace/redhat-marketplace-r52z6" Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.282127 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1468035d-a3bd-4465-9fee-f27f6f7d4d7e-utilities\") pod \"redhat-marketplace-r52z6\" (UID: \"1468035d-a3bd-4465-9fee-f27f6f7d4d7e\") " pod="openshift-marketplace/redhat-marketplace-r52z6" Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.340629 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-t667l"] Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.342308 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-t667l" Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.345768 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.361363 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-t667l"] Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.383539 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59pnj\" (UniqueName: \"kubernetes.io/projected/1468035d-a3bd-4465-9fee-f27f6f7d4d7e-kube-api-access-59pnj\") pod \"redhat-marketplace-r52z6\" (UID: \"1468035d-a3bd-4465-9fee-f27f6f7d4d7e\") " pod="openshift-marketplace/redhat-marketplace-r52z6" Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.383580 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1468035d-a3bd-4465-9fee-f27f6f7d4d7e-catalog-content\") pod \"redhat-marketplace-r52z6\" (UID: \"1468035d-a3bd-4465-9fee-f27f6f7d4d7e\") " pod="openshift-marketplace/redhat-marketplace-r52z6" Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.383636 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1468035d-a3bd-4465-9fee-f27f6f7d4d7e-utilities\") pod \"redhat-marketplace-r52z6\" (UID: \"1468035d-a3bd-4465-9fee-f27f6f7d4d7e\") " pod="openshift-marketplace/redhat-marketplace-r52z6" Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.384010 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1468035d-a3bd-4465-9fee-f27f6f7d4d7e-utilities\") pod \"redhat-marketplace-r52z6\" (UID: \"1468035d-a3bd-4465-9fee-f27f6f7d4d7e\") " pod="openshift-marketplace/redhat-marketplace-r52z6" Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.384083 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1468035d-a3bd-4465-9fee-f27f6f7d4d7e-catalog-content\") pod \"redhat-marketplace-r52z6\" (UID: \"1468035d-a3bd-4465-9fee-f27f6f7d4d7e\") " pod="openshift-marketplace/redhat-marketplace-r52z6" Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.403123 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59pnj\" (UniqueName: \"kubernetes.io/projected/1468035d-a3bd-4465-9fee-f27f6f7d4d7e-kube-api-access-59pnj\") pod \"redhat-marketplace-r52z6\" (UID: \"1468035d-a3bd-4465-9fee-f27f6f7d4d7e\") " pod="openshift-marketplace/redhat-marketplace-r52z6" Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.457683 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r52z6" Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.484113 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkft5\" (UniqueName: \"kubernetes.io/projected/07f1738b-a57a-41f6-909f-0f830c165731-kube-api-access-fkft5\") pod \"redhat-operators-t667l\" (UID: \"07f1738b-a57a-41f6-909f-0f830c165731\") " pod="openshift-marketplace/redhat-operators-t667l" Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.484160 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07f1738b-a57a-41f6-909f-0f830c165731-catalog-content\") pod \"redhat-operators-t667l\" (UID: \"07f1738b-a57a-41f6-909f-0f830c165731\") " pod="openshift-marketplace/redhat-operators-t667l" Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.484185 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07f1738b-a57a-41f6-909f-0f830c165731-utilities\") pod \"redhat-operators-t667l\" (UID: \"07f1738b-a57a-41f6-909f-0f830c165731\") " pod="openshift-marketplace/redhat-operators-t667l" Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.585538 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkft5\" (UniqueName: \"kubernetes.io/projected/07f1738b-a57a-41f6-909f-0f830c165731-kube-api-access-fkft5\") pod \"redhat-operators-t667l\" (UID: \"07f1738b-a57a-41f6-909f-0f830c165731\") " pod="openshift-marketplace/redhat-operators-t667l" Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.585593 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07f1738b-a57a-41f6-909f-0f830c165731-catalog-content\") pod \"redhat-operators-t667l\" (UID: \"07f1738b-a57a-41f6-909f-0f830c165731\") " pod="openshift-marketplace/redhat-operators-t667l" Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.585623 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07f1738b-a57a-41f6-909f-0f830c165731-utilities\") pod \"redhat-operators-t667l\" (UID: \"07f1738b-a57a-41f6-909f-0f830c165731\") " pod="openshift-marketplace/redhat-operators-t667l" Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.588856 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07f1738b-a57a-41f6-909f-0f830c165731-catalog-content\") pod \"redhat-operators-t667l\" (UID: \"07f1738b-a57a-41f6-909f-0f830c165731\") " pod="openshift-marketplace/redhat-operators-t667l" Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.596380 4632 generic.go:334] "Generic (PLEG): container finished" podID="2123ef98-d1db-4b71-8657-51be9c899a23" containerID="d249b3997970d9dcf722a21255a6153a456b0598e898011c379302943239fcca" exitCode=0 Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.596444 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwjfp" event={"ID":"2123ef98-d1db-4b71-8657-51be9c899a23","Type":"ContainerDied","Data":"d249b3997970d9dcf722a21255a6153a456b0598e898011c379302943239fcca"} Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.597279 4632 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07f1738b-a57a-41f6-909f-0f830c165731-utilities\") pod \"redhat-operators-t667l\" (UID: \"07f1738b-a57a-41f6-909f-0f830c165731\") " pod="openshift-marketplace/redhat-operators-t667l" Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.600844 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l4w7t" event={"ID":"de997f32-b849-4dcd-96e7-de56cfa3ec3d","Type":"ContainerStarted","Data":"ebbb159a46d52893adce53fa08a91830bdfa0dca84556dc7182074ba3f75d375"} Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.608261 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkft5\" (UniqueName: \"kubernetes.io/projected/07f1738b-a57a-41f6-909f-0f830c165731-kube-api-access-fkft5\") pod \"redhat-operators-t667l\" (UID: \"07f1738b-a57a-41f6-909f-0f830c165731\") " pod="openshift-marketplace/redhat-operators-t667l" Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.669803 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-t667l" Dec 01 06:46:55 crc kubenswrapper[4632]: I1201 06:46:55.862212 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-r52z6"] Dec 01 06:46:55 crc kubenswrapper[4632]: W1201 06:46:55.866902 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1468035d_a3bd_4465_9fee_f27f6f7d4d7e.slice/crio-5bdf0c7be31c46d442af5f49b772d65dbf8453ef83a9ec1425bef81fb9aab822 WatchSource:0}: Error finding container 5bdf0c7be31c46d442af5f49b772d65dbf8453ef83a9ec1425bef81fb9aab822: Status 404 returned error can't find the container with id 5bdf0c7be31c46d442af5f49b772d65dbf8453ef83a9ec1425bef81fb9aab822 Dec 01 06:46:56 crc kubenswrapper[4632]: I1201 06:46:56.015018 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-t667l"] Dec 01 06:46:56 crc kubenswrapper[4632]: W1201 06:46:56.029100 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod07f1738b_a57a_41f6_909f_0f830c165731.slice/crio-5f9d66c8e11c6137d77721f688f6542f024bdc9f8347c7e7543e8de9e80f2384 WatchSource:0}: Error finding container 5f9d66c8e11c6137d77721f688f6542f024bdc9f8347c7e7543e8de9e80f2384: Status 404 returned error can't find the container with id 5f9d66c8e11c6137d77721f688f6542f024bdc9f8347c7e7543e8de9e80f2384 Dec 01 06:46:56 crc kubenswrapper[4632]: I1201 06:46:56.612626 4632 generic.go:334] "Generic (PLEG): container finished" podID="de997f32-b849-4dcd-96e7-de56cfa3ec3d" containerID="ebbb159a46d52893adce53fa08a91830bdfa0dca84556dc7182074ba3f75d375" exitCode=0 Dec 01 06:46:56 crc kubenswrapper[4632]: I1201 06:46:56.612819 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l4w7t" event={"ID":"de997f32-b849-4dcd-96e7-de56cfa3ec3d","Type":"ContainerDied","Data":"ebbb159a46d52893adce53fa08a91830bdfa0dca84556dc7182074ba3f75d375"} Dec 01 06:46:56 crc kubenswrapper[4632]: I1201 06:46:56.614134 4632 generic.go:334] "Generic (PLEG): container finished" podID="07f1738b-a57a-41f6-909f-0f830c165731" containerID="706de5aa06ade1df5fcec0532de1fd66ba76394371a9f90d5be3609e2e3f1e26" exitCode=0 Dec 01 06:46:56 crc kubenswrapper[4632]: I1201 06:46:56.614187 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-t667l" event={"ID":"07f1738b-a57a-41f6-909f-0f830c165731","Type":"ContainerDied","Data":"706de5aa06ade1df5fcec0532de1fd66ba76394371a9f90d5be3609e2e3f1e26"} Dec 01 06:46:56 crc kubenswrapper[4632]: I1201 06:46:56.614208 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t667l" event={"ID":"07f1738b-a57a-41f6-909f-0f830c165731","Type":"ContainerStarted","Data":"5f9d66c8e11c6137d77721f688f6542f024bdc9f8347c7e7543e8de9e80f2384"} Dec 01 06:46:56 crc kubenswrapper[4632]: I1201 06:46:56.615846 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwjfp" event={"ID":"2123ef98-d1db-4b71-8657-51be9c899a23","Type":"ContainerStarted","Data":"9eae05e50ec6982dc7ccbb42cf8f7adfcdc23dbea9edb9e86335d577656656e0"} Dec 01 06:46:56 crc kubenswrapper[4632]: I1201 06:46:56.617134 4632 generic.go:334] "Generic (PLEG): container finished" podID="1468035d-a3bd-4465-9fee-f27f6f7d4d7e" containerID="d325c19d0f467af1121b6f1a4ab5283aa5397dd3b4d3e775613af551ac498a2b" exitCode=0 Dec 01 06:46:56 crc kubenswrapper[4632]: I1201 06:46:56.617158 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r52z6" event={"ID":"1468035d-a3bd-4465-9fee-f27f6f7d4d7e","Type":"ContainerDied","Data":"d325c19d0f467af1121b6f1a4ab5283aa5397dd3b4d3e775613af551ac498a2b"} Dec 01 06:46:56 crc kubenswrapper[4632]: I1201 06:46:56.617174 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r52z6" event={"ID":"1468035d-a3bd-4465-9fee-f27f6f7d4d7e","Type":"ContainerStarted","Data":"5bdf0c7be31c46d442af5f49b772d65dbf8453ef83a9ec1425bef81fb9aab822"} Dec 01 06:46:56 crc kubenswrapper[4632]: I1201 06:46:56.642960 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mwjfp" podStartSLOduration=2.064327765 podStartE2EDuration="4.642948236s" podCreationTimestamp="2025-12-01 06:46:52 +0000 UTC" firstStartedPulling="2025-12-01 06:46:53.584384859 +0000 UTC m=+223.149397831" lastFinishedPulling="2025-12-01 06:46:56.163005329 +0000 UTC m=+225.728018302" observedRunningTime="2025-12-01 06:46:56.642650213 +0000 UTC m=+226.207663186" watchObservedRunningTime="2025-12-01 06:46:56.642948236 +0000 UTC m=+226.207961199" Dec 01 06:46:58 crc kubenswrapper[4632]: I1201 06:46:58.628258 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l4w7t" event={"ID":"de997f32-b849-4dcd-96e7-de56cfa3ec3d","Type":"ContainerStarted","Data":"d5e0b61f222fccf9b9c75868113a3494713458ef0e3b587f81a6e241fb996f02"} Dec 01 06:46:58 crc kubenswrapper[4632]: I1201 06:46:58.630592 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t667l" event={"ID":"07f1738b-a57a-41f6-909f-0f830c165731","Type":"ContainerStarted","Data":"b9fb0c81ba26a15ec3cad7c89a374d7bc6973327a55ef30a491c6ae78ebb97d0"} Dec 01 06:46:58 crc kubenswrapper[4632]: I1201 06:46:58.633404 4632 generic.go:334] "Generic (PLEG): container finished" podID="1468035d-a3bd-4465-9fee-f27f6f7d4d7e" containerID="bd11dff24a84032c744d652afe07a2b3b01b153e23a0b03c6bcba1f184af1391" exitCode=0 Dec 01 06:46:58 crc kubenswrapper[4632]: I1201 06:46:58.633458 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r52z6" 
event={"ID":"1468035d-a3bd-4465-9fee-f27f6f7d4d7e","Type":"ContainerDied","Data":"bd11dff24a84032c744d652afe07a2b3b01b153e23a0b03c6bcba1f184af1391"} Dec 01 06:46:58 crc kubenswrapper[4632]: I1201 06:46:58.647836 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-l4w7t" podStartSLOduration=3.911315991 podStartE2EDuration="6.647815197s" podCreationTimestamp="2025-12-01 06:46:52 +0000 UTC" firstStartedPulling="2025-12-01 06:46:54.592662309 +0000 UTC m=+224.157675282" lastFinishedPulling="2025-12-01 06:46:57.329161514 +0000 UTC m=+226.894174488" observedRunningTime="2025-12-01 06:46:58.645344322 +0000 UTC m=+228.210357295" watchObservedRunningTime="2025-12-01 06:46:58.647815197 +0000 UTC m=+228.212828170" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.294311 4632 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.295209 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.295286 4632 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.295576 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1" gracePeriod=15 Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.295650 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b" gracePeriod=15 Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.295691 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe" gracePeriod=15 Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.295653 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406" gracePeriod=15 Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.295776 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0" gracePeriod=15 Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.297256 4632 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 01 06:46:59 crc kubenswrapper[4632]: E1201 06:46:59.297493 4632 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.297510 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 01 06:46:59 crc kubenswrapper[4632]: E1201 06:46:59.297519 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.297527 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 01 06:46:59 crc kubenswrapper[4632]: E1201 06:46:59.297539 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.297545 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 01 06:46:59 crc kubenswrapper[4632]: E1201 06:46:59.297552 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.297557 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 01 06:46:59 crc kubenswrapper[4632]: E1201 06:46:59.297563 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.297570 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 01 06:46:59 crc kubenswrapper[4632]: E1201 06:46:59.297578 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.297583 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.297672 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.297681 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.297692 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.297699 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.297706 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 01 06:46:59 crc kubenswrapper[4632]: E1201 06:46:59.339381 4632 kubelet.go:1929] "Failed creating a mirror pod for" err="Post 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 192.168.26.49:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:46:59 crc kubenswrapper[4632]: E1201 06:46:59.356113 4632 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 192.168.26.49:6443: connect: connection refused" event="&Event{ObjectMeta:{redhat-marketplace-r52z6.187d0489e86dc659 openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:redhat-marketplace-r52z6,UID:1468035d-a3bd-4465-9fee-f27f6f7d4d7e,APIVersion:v1,ResourceVersion:29663,FieldPath:spec.containers{registry-server},},Reason:Created,Message:Created container registry-server,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-01 06:46:59.355641433 +0000 UTC m=+228.920654407,LastTimestamp:2025-12-01 06:46:59.355641433 +0000 UTC m=+228.920654407,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.436818 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.436884 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.436905 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.436964 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.436985 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.437032 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.437059 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.437111 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.538135 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.538186 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.538208 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.538227 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.538259 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.538280 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.538295 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.538322 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.538372 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.538401 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.538441 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.538451 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.538462 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.538492 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.538495 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.538540 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.640216 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.640634 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.641236 4632 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406" exitCode=0 Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.641264 4632 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0" exitCode=0 Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.641272 4632 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b" exitCode=0 Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.641278 4632 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe" exitCode=2 Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.642651 4632 generic.go:334] "Generic (PLEG): container finished" podID="06aced22-ea91-4173-98a5-df8d91ebe64d" containerID="024e52da2a83419a3730e9f0f4b488118e1ffe5b08e73a4b0cb78d1658a3b588" exitCode=0 Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.642735 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"06aced22-ea91-4173-98a5-df8d91ebe64d","Type":"ContainerDied","Data":"024e52da2a83419a3730e9f0f4b488118e1ffe5b08e73a4b0cb78d1658a3b588"} Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.643313 4632 status_manager.go:851] "Failed to get status for pod" podUID="06aced22-ea91-4173-98a5-df8d91ebe64d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.49:6443: connect: connection refused" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.643689 4632 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.26.49:6443: connect: connection refused" Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.644880 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r52z6" event={"ID":"1468035d-a3bd-4465-9fee-f27f6f7d4d7e","Type":"ContainerStarted","Data":"a13a9770f02b98d78c78ab3e173cda5c0ae565eb027dffed3d340e7a612cd9fe"} Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.645283 4632 status_manager.go:851] "Failed to get status for pod" podUID="06aced22-ea91-4173-98a5-df8d91ebe64d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.49:6443: connect: connection refused" Dec 01 
06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.645547 4632 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.645859 4632 status_manager.go:851] "Failed to get status for pod" podUID="1468035d-a3bd-4465-9fee-f27f6f7d4d7e" pod="openshift-marketplace/redhat-marketplace-r52z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r52z6\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.646371 4632 generic.go:334] "Generic (PLEG): container finished" podID="07f1738b-a57a-41f6-909f-0f830c165731" containerID="b9fb0c81ba26a15ec3cad7c89a374d7bc6973327a55ef30a491c6ae78ebb97d0" exitCode=0
Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.646582 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t667l" event={"ID":"07f1738b-a57a-41f6-909f-0f830c165731","Type":"ContainerDied","Data":"b9fb0c81ba26a15ec3cad7c89a374d7bc6973327a55ef30a491c6ae78ebb97d0"}
Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.647188 4632 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.647585 4632 status_manager.go:851] "Failed to get status for pod" podUID="1468035d-a3bd-4465-9fee-f27f6f7d4d7e" pod="openshift-marketplace/redhat-marketplace-r52z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r52z6\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.647882 4632 status_manager.go:851] "Failed to get status for pod" podUID="07f1738b-a57a-41f6-909f-0f830c165731" pod="openshift-marketplace/redhat-operators-t667l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t667l\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:46:59 crc kubenswrapper[4632]: I1201 06:46:59.648132 4632 status_manager.go:851] "Failed to get status for pod" podUID="06aced22-ea91-4173-98a5-df8d91ebe64d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:46:59 crc kubenswrapper[4632]: W1201 06:46:59.659380 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-3c8658daa1a3a74197f7e86a2cc21de4fc3add7c8be80c376c848ef859a3ee78 WatchSource:0}: Error finding container 3c8658daa1a3a74197f7e86a2cc21de4fc3add7c8be80c376c848ef859a3ee78: Status 404 returned error can't find the container with id 3c8658daa1a3a74197f7e86a2cc21de4fc3add7c8be80c376c848ef859a3ee78
Dec 01 06:47:00 crc kubenswrapper[4632]: I1201 06:47:00.652711 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"6d3ccbaa1e24dd3fe69b924367e913ced7844c74be2cad892be8a6bac55dc744"}
Dec 01 06:47:00 crc kubenswrapper[4632]: I1201 06:47:00.653832 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"3c8658daa1a3a74197f7e86a2cc21de4fc3add7c8be80c376c848ef859a3ee78"}
Dec 01 06:47:00 crc kubenswrapper[4632]: I1201 06:47:00.654528 4632 status_manager.go:851] "Failed to get status for pod" podUID="06aced22-ea91-4173-98a5-df8d91ebe64d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:00 crc kubenswrapper[4632]: I1201 06:47:00.654623 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-t667l" event={"ID":"07f1738b-a57a-41f6-909f-0f830c165731","Type":"ContainerStarted","Data":"c3062b99acd811688431b92ef2615a34e1094b3ee5e67a21d593975309e6047e"}
Dec 01 06:47:00 crc kubenswrapper[4632]: E1201 06:47:00.654555 4632 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 192.168.26.49:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 01 06:47:00 crc kubenswrapper[4632]: I1201 06:47:00.654846 4632 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:00 crc kubenswrapper[4632]: I1201 06:47:00.655062 4632 status_manager.go:851] "Failed to get status for pod" podUID="1468035d-a3bd-4465-9fee-f27f6f7d4d7e" pod="openshift-marketplace/redhat-marketplace-r52z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r52z6\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:00 crc kubenswrapper[4632]: I1201 06:47:00.655293 4632 status_manager.go:851] "Failed to get status for pod" podUID="07f1738b-a57a-41f6-909f-0f830c165731" pod="openshift-marketplace/redhat-operators-t667l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t667l\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:00 crc kubenswrapper[4632]: I1201 06:47:00.655591 4632 status_manager.go:851] "Failed to get status for pod" podUID="1468035d-a3bd-4465-9fee-f27f6f7d4d7e" pod="openshift-marketplace/redhat-marketplace-r52z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r52z6\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:00 crc kubenswrapper[4632]: I1201 06:47:00.655780 4632 status_manager.go:851] "Failed to get status for pod" podUID="07f1738b-a57a-41f6-909f-0f830c165731" pod="openshift-marketplace/redhat-operators-t667l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t667l\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:00 crc kubenswrapper[4632]: I1201 06:47:00.655953 4632 status_manager.go:851] "Failed to get status for pod" podUID="06aced22-ea91-4173-98a5-df8d91ebe64d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:00 crc kubenswrapper[4632]: I1201 06:47:00.656233 4632 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:00 crc kubenswrapper[4632]: I1201 06:47:00.752025 4632 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:00 crc kubenswrapper[4632]: I1201 06:47:00.752255 4632 status_manager.go:851] "Failed to get status for pod" podUID="1468035d-a3bd-4465-9fee-f27f6f7d4d7e" pod="openshift-marketplace/redhat-marketplace-r52z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r52z6\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:00 crc kubenswrapper[4632]: I1201 06:47:00.752493 4632 status_manager.go:851] "Failed to get status for pod" podUID="07f1738b-a57a-41f6-909f-0f830c165731" pod="openshift-marketplace/redhat-operators-t667l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t667l\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:00 crc kubenswrapper[4632]: I1201 06:47:00.752769 4632 status_manager.go:851] "Failed to get status for pod" podUID="06aced22-ea91-4173-98a5-df8d91ebe64d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:00 crc kubenswrapper[4632]: I1201 06:47:00.957744 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 01 06:47:00 crc kubenswrapper[4632]: I1201 06:47:00.958177 4632 status_manager.go:851] "Failed to get status for pod" podUID="1468035d-a3bd-4465-9fee-f27f6f7d4d7e" pod="openshift-marketplace/redhat-marketplace-r52z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r52z6\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:00 crc kubenswrapper[4632]: I1201 06:47:00.958416 4632 status_manager.go:851] "Failed to get status for pod" podUID="07f1738b-a57a-41f6-909f-0f830c165731" pod="openshift-marketplace/redhat-operators-t667l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t667l\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:00 crc kubenswrapper[4632]: I1201 06:47:00.958735 4632 status_manager.go:851] "Failed to get status for pod" podUID="06aced22-ea91-4173-98a5-df8d91ebe64d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:01 crc kubenswrapper[4632]: I1201 06:47:01.157618 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/06aced22-ea91-4173-98a5-df8d91ebe64d-var-lock\") pod \"06aced22-ea91-4173-98a5-df8d91ebe64d\" (UID: \"06aced22-ea91-4173-98a5-df8d91ebe64d\") "
Dec 01 06:47:01 crc kubenswrapper[4632]: I1201 06:47:01.157681 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/06aced22-ea91-4173-98a5-df8d91ebe64d-var-lock" (OuterVolumeSpecName: "var-lock") pod "06aced22-ea91-4173-98a5-df8d91ebe64d" (UID: "06aced22-ea91-4173-98a5-df8d91ebe64d"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 06:47:01 crc kubenswrapper[4632]: I1201 06:47:01.157810 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/06aced22-ea91-4173-98a5-df8d91ebe64d-kube-api-access\") pod \"06aced22-ea91-4173-98a5-df8d91ebe64d\" (UID: \"06aced22-ea91-4173-98a5-df8d91ebe64d\") "
Dec 01 06:47:01 crc kubenswrapper[4632]: I1201 06:47:01.157830 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/06aced22-ea91-4173-98a5-df8d91ebe64d-kubelet-dir\") pod \"06aced22-ea91-4173-98a5-df8d91ebe64d\" (UID: \"06aced22-ea91-4173-98a5-df8d91ebe64d\") "
Dec 01 06:47:01 crc kubenswrapper[4632]: I1201 06:47:01.158014 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/06aced22-ea91-4173-98a5-df8d91ebe64d-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "06aced22-ea91-4173-98a5-df8d91ebe64d" (UID: "06aced22-ea91-4173-98a5-df8d91ebe64d"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 06:47:01 crc kubenswrapper[4632]: I1201 06:47:01.158030 4632 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/06aced22-ea91-4173-98a5-df8d91ebe64d-var-lock\") on node \"crc\" DevicePath \"\""
Dec 01 06:47:01 crc kubenswrapper[4632]: I1201 06:47:01.161900 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06aced22-ea91-4173-98a5-df8d91ebe64d-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "06aced22-ea91-4173-98a5-df8d91ebe64d" (UID: "06aced22-ea91-4173-98a5-df8d91ebe64d"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:47:01 crc kubenswrapper[4632]: I1201 06:47:01.259838 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/06aced22-ea91-4173-98a5-df8d91ebe64d-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 01 06:47:01 crc kubenswrapper[4632]: I1201 06:47:01.260067 4632 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/06aced22-ea91-4173-98a5-df8d91ebe64d-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 01 06:47:01 crc kubenswrapper[4632]: I1201 06:47:01.660918 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"06aced22-ea91-4173-98a5-df8d91ebe64d","Type":"ContainerDied","Data":"cb5bf7a2c5b93a74a0685ee03188defb31df3a181dfedd5a9df241d83cea4e89"}
Dec 01 06:47:01 crc kubenswrapper[4632]: I1201 06:47:01.661168 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb5bf7a2c5b93a74a0685ee03188defb31df3a181dfedd5a9df241d83cea4e89"
Dec 01 06:47:01 crc kubenswrapper[4632]: I1201 06:47:01.660928 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Dec 01 06:47:01 crc kubenswrapper[4632]: I1201 06:47:01.663392 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Dec 01 06:47:01 crc kubenswrapper[4632]: I1201 06:47:01.664157 4632 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1" exitCode=0
Dec 01 06:47:01 crc kubenswrapper[4632]: E1201 06:47:01.665439 4632 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 192.168.26.49:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 01 06:47:01 crc kubenswrapper[4632]: I1201 06:47:01.671429 4632 status_manager.go:851] "Failed to get status for pod" podUID="1468035d-a3bd-4465-9fee-f27f6f7d4d7e" pod="openshift-marketplace/redhat-marketplace-r52z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r52z6\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:01 crc kubenswrapper[4632]: I1201 06:47:01.671660 4632 status_manager.go:851] "Failed to get status for pod" podUID="07f1738b-a57a-41f6-909f-0f830c165731" pod="openshift-marketplace/redhat-operators-t667l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t667l\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:01 crc kubenswrapper[4632]: I1201 06:47:01.671831 4632 status_manager.go:851] "Failed to get status for pod" podUID="06aced22-ea91-4173-98a5-df8d91ebe64d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:02 crc kubenswrapper[4632]: E1201 06:47:02.023289 4632 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:02 crc kubenswrapper[4632]: E1201 06:47:02.023655 4632 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:02 crc kubenswrapper[4632]: E1201 06:47:02.023816 4632 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:02 crc kubenswrapper[4632]: E1201 06:47:02.023963 4632 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:02 crc kubenswrapper[4632]: E1201 06:47:02.024349 4632 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.024396 4632 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease"
Dec 01 06:47:02 crc kubenswrapper[4632]: E1201 06:47:02.024830 4632 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.49:6443: connect: connection refused" interval="200ms"
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.059652 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.060807 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.061427 4632 status_manager.go:851] "Failed to get status for pod" podUID="06aced22-ea91-4173-98a5-df8d91ebe64d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.061975 4632 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.062297 4632 status_manager.go:851] "Failed to get status for pod" podUID="1468035d-a3bd-4465-9fee-f27f6f7d4d7e" pod="openshift-marketplace/redhat-marketplace-r52z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r52z6\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.062661 4632 status_manager.go:851] "Failed to get status for pod" podUID="07f1738b-a57a-41f6-909f-0f830c165731" pod="openshift-marketplace/redhat-operators-t667l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t667l\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.172861 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.173012 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.173006 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.173053 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") "
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.173056 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.173168 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.173452 4632 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.173471 4632 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\""
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.173479 4632 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\""
Dec 01 06:47:02 crc kubenswrapper[4632]: E1201 06:47:02.225197 4632 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.49:6443: connect: connection refused" interval="400ms"
Dec 01 06:47:02 crc kubenswrapper[4632]: E1201 06:47:02.626216 4632 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.49:6443: connect: connection refused" interval="800ms"
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.673638 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.674225 4632 scope.go:117] "RemoveContainer" containerID="e6c8d0e7997274da4fde6d77aa95738d4373b1e8be0a78d1e162abfa5bc13406"
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.674380 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.686043 4632 status_manager.go:851] "Failed to get status for pod" podUID="1468035d-a3bd-4465-9fee-f27f6f7d4d7e" pod="openshift-marketplace/redhat-marketplace-r52z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r52z6\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.686217 4632 status_manager.go:851] "Failed to get status for pod" podUID="07f1738b-a57a-41f6-909f-0f830c165731" pod="openshift-marketplace/redhat-operators-t667l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t667l\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.686388 4632 status_manager.go:851] "Failed to get status for pod" podUID="06aced22-ea91-4173-98a5-df8d91ebe64d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.686676 4632 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.689894 4632 scope.go:117] "RemoveContainer" containerID="a12a0eb3b0896f4eea71c340b9ea63e78eb8e34bee44199cf0704e817c354ee0"
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.708111 4632 scope.go:117] "RemoveContainer" containerID="07ec6e155e7f63f435b85516f7e9d3cd2da2655e4797e9dcb6b4825464b1024b"
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.719690 4632 scope.go:117] "RemoveContainer" containerID="b304b74da91b1c193d72a1350f64f995c6f23cdd8629e18367331ca50fabdfbe"
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.733120 4632 scope.go:117] "RemoveContainer" containerID="dca4f095ddcf95e696f42e72f87879fb758511c2a56918f2e635a467a4968df1"
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.744467 4632 scope.go:117] "RemoveContainer" containerID="4208752454a2748d4f3c8f52cbf26dc4c585f0ed0c809b58e6d3676047a3cc5b"
Dec 01 06:47:02 crc kubenswrapper[4632]: I1201 06:47:02.759047 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes"
Dec 01 06:47:03 crc kubenswrapper[4632]: I1201 06:47:03.064422 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-mwjfp"
Dec 01 06:47:03 crc kubenswrapper[4632]: I1201 06:47:03.064481 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-mwjfp"
Dec 01 06:47:03 crc kubenswrapper[4632]: I1201 06:47:03.095579 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mwjfp"
Dec 01 06:47:03 crc kubenswrapper[4632]: I1201 06:47:03.095987 4632 status_manager.go:851] "Failed to get status for pod" podUID="2123ef98-d1db-4b71-8657-51be9c899a23" pod="openshift-marketplace/community-operators-mwjfp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-mwjfp\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:03 crc kubenswrapper[4632]: I1201 06:47:03.096195 4632 status_manager.go:851] "Failed to get status for pod" podUID="1468035d-a3bd-4465-9fee-f27f6f7d4d7e" pod="openshift-marketplace/redhat-marketplace-r52z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r52z6\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:03 crc kubenswrapper[4632]: I1201 06:47:03.096398 4632 status_manager.go:851] "Failed to get status for pod" podUID="07f1738b-a57a-41f6-909f-0f830c165731" pod="openshift-marketplace/redhat-operators-t667l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t667l\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:03 crc kubenswrapper[4632]: I1201 06:47:03.096605 4632 status_manager.go:851] "Failed to get status for pod" podUID="06aced22-ea91-4173-98a5-df8d91ebe64d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:03 crc kubenswrapper[4632]: I1201 06:47:03.282787 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-l4w7t"
Dec 01 06:47:03 crc kubenswrapper[4632]: I1201 06:47:03.282832 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-l4w7t"
Dec 01 06:47:03 crc kubenswrapper[4632]: I1201 06:47:03.312193 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-l4w7t"
Dec 01 06:47:03 crc kubenswrapper[4632]: I1201 06:47:03.312717 4632 status_manager.go:851] "Failed to get status for pod" podUID="06aced22-ea91-4173-98a5-df8d91ebe64d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:03 crc kubenswrapper[4632]: I1201 06:47:03.313148 4632 status_manager.go:851] "Failed to get status for pod" podUID="2123ef98-d1db-4b71-8657-51be9c899a23" pod="openshift-marketplace/community-operators-mwjfp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-mwjfp\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:03 crc kubenswrapper[4632]: I1201 06:47:03.313461 4632 status_manager.go:851] "Failed to get status for pod" podUID="1468035d-a3bd-4465-9fee-f27f6f7d4d7e" pod="openshift-marketplace/redhat-marketplace-r52z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r52z6\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:03 crc kubenswrapper[4632]: I1201 06:47:03.313959 4632 status_manager.go:851] "Failed to get status for pod" podUID="07f1738b-a57a-41f6-909f-0f830c165731" pod="openshift-marketplace/redhat-operators-t667l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t667l\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:03 crc kubenswrapper[4632]: I1201 06:47:03.314423 4632 status_manager.go:851] "Failed to get status for pod" podUID="de997f32-b849-4dcd-96e7-de56cfa3ec3d" pod="openshift-marketplace/certified-operators-l4w7t" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-l4w7t\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:03 crc kubenswrapper[4632]: E1201 06:47:03.427892 4632 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.49:6443: connect: connection refused" interval="1.6s"
Dec 01 06:47:03 crc kubenswrapper[4632]: I1201 06:47:03.710437 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mwjfp"
Dec 01 06:47:03 crc kubenswrapper[4632]: I1201 06:47:03.712038 4632 status_manager.go:851] "Failed to get status for pod" podUID="de997f32-b849-4dcd-96e7-de56cfa3ec3d" pod="openshift-marketplace/certified-operators-l4w7t" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-l4w7t\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:03 crc kubenswrapper[4632]: I1201 06:47:03.712159 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-l4w7t"
Dec 01 06:47:03 crc kubenswrapper[4632]: I1201 06:47:03.712406 4632 status_manager.go:851] "Failed to get status for pod" podUID="06aced22-ea91-4173-98a5-df8d91ebe64d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:03 crc kubenswrapper[4632]: I1201 06:47:03.712793 4632 status_manager.go:851] "Failed to get status for pod" podUID="2123ef98-d1db-4b71-8657-51be9c899a23" pod="openshift-marketplace/community-operators-mwjfp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-mwjfp\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:03 crc kubenswrapper[4632]: I1201 06:47:03.713262 4632 status_manager.go:851] "Failed to get status for pod" podUID="1468035d-a3bd-4465-9fee-f27f6f7d4d7e" pod="openshift-marketplace/redhat-marketplace-r52z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r52z6\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:03 crc kubenswrapper[4632]: I1201 06:47:03.713511 4632 status_manager.go:851] "Failed to get status for pod" podUID="07f1738b-a57a-41f6-909f-0f830c165731" pod="openshift-marketplace/redhat-operators-t667l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t667l\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:03 crc kubenswrapper[4632]: I1201 06:47:03.713829 4632 status_manager.go:851] "Failed to get status for pod" podUID="06aced22-ea91-4173-98a5-df8d91ebe64d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:03 crc kubenswrapper[4632]: I1201 06:47:03.714107 4632 status_manager.go:851] "Failed to get status for pod" podUID="2123ef98-d1db-4b71-8657-51be9c899a23" pod="openshift-marketplace/community-operators-mwjfp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-mwjfp\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:03 crc kubenswrapper[4632]: I1201 06:47:03.714383 4632 status_manager.go:851] "Failed to get status for pod" podUID="1468035d-a3bd-4465-9fee-f27f6f7d4d7e" pod="openshift-marketplace/redhat-marketplace-r52z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r52z6\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:03 crc kubenswrapper[4632]: I1201 06:47:03.714604 4632 status_manager.go:851] "Failed to get status for pod" podUID="07f1738b-a57a-41f6-909f-0f830c165731" pod="openshift-marketplace/redhat-operators-t667l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t667l\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:03 crc kubenswrapper[4632]: I1201 06:47:03.714833 4632 status_manager.go:851] "Failed to get status for pod" podUID="de997f32-b849-4dcd-96e7-de56cfa3ec3d" pod="openshift-marketplace/certified-operators-l4w7t" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-l4w7t\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:05 crc kubenswrapper[4632]: E1201 06:47:05.028637 4632 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.49:6443: connect: connection refused" interval="3.2s"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.457760 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-r52z6"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.457829 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-r52z6"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.486923 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-r52z6"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.487259 4632 status_manager.go:851] "Failed to get status for pod" podUID="2123ef98-d1db-4b71-8657-51be9c899a23" pod="openshift-marketplace/community-operators-mwjfp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-mwjfp\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.487578 4632 status_manager.go:851] "Failed to get status for pod" podUID="1468035d-a3bd-4465-9fee-f27f6f7d4d7e" pod="openshift-marketplace/redhat-marketplace-r52z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r52z6\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.487861 4632 status_manager.go:851] "Failed to get status for pod" podUID="07f1738b-a57a-41f6-909f-0f830c165731" pod="openshift-marketplace/redhat-operators-t667l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t667l\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.488061 4632 status_manager.go:851] "Failed to get status for pod" podUID="de997f32-b849-4dcd-96e7-de56cfa3ec3d" pod="openshift-marketplace/certified-operators-l4w7t" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-l4w7t\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.488218 4632 status_manager.go:851] "Failed to get status for pod" podUID="06aced22-ea91-4173-98a5-df8d91ebe64d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:05 crc kubenswrapper[4632]: E1201 06:47:05.661568 4632 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 192.168.26.49:6443: connect: connection refused" event="&Event{ObjectMeta:{redhat-marketplace-r52z6.187d0489e86dc659 openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:redhat-marketplace-r52z6,UID:1468035d-a3bd-4465-9fee-f27f6f7d4d7e,APIVersion:v1,ResourceVersion:29663,FieldPath:spec.containers{registry-server},},Reason:Created,Message:Created container registry-server,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-01 06:46:59.355641433 +0000 UTC m=+228.920654407,LastTimestamp:2025-12-01 06:46:59.355641433 +0000 UTC m=+228.920654407,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.671209 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-t667l"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.671312 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-t667l"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.704232 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-t667l"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.704691 4632 status_manager.go:851] "Failed to get status for pod" podUID="2123ef98-d1db-4b71-8657-51be9c899a23" pod="openshift-marketplace/community-operators-mwjfp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-mwjfp\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.704997 4632 status_manager.go:851] "Failed to get status for pod" podUID="1468035d-a3bd-4465-9fee-f27f6f7d4d7e" pod="openshift-marketplace/redhat-marketplace-r52z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r52z6\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.705245 4632 status_manager.go:851] "Failed to get status for pod" podUID="07f1738b-a57a-41f6-909f-0f830c165731" pod="openshift-marketplace/redhat-operators-t667l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t667l\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.705542 4632 status_manager.go:851] "Failed to get status for pod" podUID="de997f32-b849-4dcd-96e7-de56cfa3ec3d" pod="openshift-marketplace/certified-operators-l4w7t" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-l4w7t\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.705813 4632 status_manager.go:851] "Failed to get status for pod" podUID="06aced22-ea91-4173-98a5-df8d91ebe64d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.720300 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-r52z6"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.720619 4632 status_manager.go:851] "Failed to get status for pod" podUID="06aced22-ea91-4173-98a5-df8d91ebe64d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.720846 4632 status_manager.go:851] "Failed to get status for pod" podUID="2123ef98-d1db-4b71-8657-51be9c899a23" pod="openshift-marketplace/community-operators-mwjfp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-mwjfp\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.721117 4632 status_manager.go:851] "Failed to get status for pod" podUID="1468035d-a3bd-4465-9fee-f27f6f7d4d7e" pod="openshift-marketplace/redhat-marketplace-r52z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r52z6\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.721409 4632 status_manager.go:851] "Failed to get status for pod" podUID="07f1738b-a57a-41f6-909f-0f830c165731" pod="openshift-marketplace/redhat-operators-t667l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t667l\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.721620 4632 status_manager.go:851] "Failed to get status for pod" podUID="de997f32-b849-4dcd-96e7-de56cfa3ec3d" pod="openshift-marketplace/certified-operators-l4w7t" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-l4w7t\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.731861 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-t667l"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.732200 4632 status_manager.go:851] "Failed to get status for pod" podUID="06aced22-ea91-4173-98a5-df8d91ebe64d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.732483 4632 status_manager.go:851] "Failed to get status for pod" podUID="2123ef98-d1db-4b71-8657-51be9c899a23" pod="openshift-marketplace/community-operators-mwjfp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-mwjfp\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.732700 4632 status_manager.go:851] "Failed to get status for pod" podUID="1468035d-a3bd-4465-9fee-f27f6f7d4d7e" pod="openshift-marketplace/redhat-marketplace-r52z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r52z6\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.732901 4632 status_manager.go:851] "Failed to get status for pod" podUID="07f1738b-a57a-41f6-909f-0f830c165731" pod="openshift-marketplace/redhat-operators-t667l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t667l\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:05 crc kubenswrapper[4632]: I1201 06:47:05.733105 4632 status_manager.go:851] "Failed to get status for pod" podUID="de997f32-b849-4dcd-96e7-de56cfa3ec3d" pod="openshift-marketplace/certified-operators-l4w7t" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-l4w7t\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:08 crc kubenswrapper[4632]: E1201 06:47:08.229531 4632 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.26.49:6443: connect: connection refused" interval="6.4s"
Dec 01 06:47:10 crc kubenswrapper[4632]: I1201 06:47:10.751730 4632 status_manager.go:851] "Failed to get status for pod" podUID="de997f32-b849-4dcd-96e7-de56cfa3ec3d" pod="openshift-marketplace/certified-operators-l4w7t" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-l4w7t\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:10 crc kubenswrapper[4632]: I1201 06:47:10.752025 4632 status_manager.go:851] "Failed to get status for pod" podUID="06aced22-ea91-4173-98a5-df8d91ebe64d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:10 crc kubenswrapper[4632]: I1201 06:47:10.752242 4632 status_manager.go:851] "Failed to get status for pod" podUID="2123ef98-d1db-4b71-8657-51be9c899a23" pod="openshift-marketplace/community-operators-mwjfp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-mwjfp\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:10 crc kubenswrapper[4632]: I1201 06:47:10.752477 4632 status_manager.go:851] "Failed to get status for pod" podUID="1468035d-a3bd-4465-9fee-f27f6f7d4d7e" pod="openshift-marketplace/redhat-marketplace-r52z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r52z6\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:10 crc kubenswrapper[4632]: I1201 06:47:10.752726 4632 status_manager.go:851] "Failed to get status for pod" podUID="07f1738b-a57a-41f6-909f-0f830c165731" pod="openshift-marketplace/redhat-operators-t667l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t667l\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:12 crc kubenswrapper[4632]: I1201 06:47:12.733170 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Dec 01 06:47:12 crc kubenswrapper[4632]: I1201 06:47:12.733797 4632 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56" exitCode=1
Dec 01 06:47:12 crc kubenswrapper[4632]: I1201 06:47:12.733825 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56"}
Dec 01 06:47:12 crc kubenswrapper[4632]: I1201 06:47:12.734135 4632 scope.go:117] "RemoveContainer" containerID="bf8f3caaf1bd546c763446515baa7f22438d0ee949f7d0e45988988c3a4f5b56"
Dec 01 06:47:12 crc kubenswrapper[4632]: I1201 06:47:12.735041 4632 status_manager.go:851] "Failed to get status for pod" podUID="de997f32-b849-4dcd-96e7-de56cfa3ec3d" pod="openshift-marketplace/certified-operators-l4w7t" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-l4w7t\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:12 crc kubenswrapper[4632]: I1201 06:47:12.735447 4632 status_manager.go:851] "Failed to get status for pod" podUID="06aced22-ea91-4173-98a5-df8d91ebe64d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:12 crc kubenswrapper[4632]: I1201 06:47:12.735782 4632 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:12 crc kubenswrapper[4632]: I1201 06:47:12.736007 4632 status_manager.go:851] "Failed to get status for pod" podUID="2123ef98-d1db-4b71-8657-51be9c899a23" pod="openshift-marketplace/community-operators-mwjfp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-mwjfp\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:12 crc kubenswrapper[4632]: I1201 06:47:12.736255 4632 status_manager.go:851] "Failed to get status for pod" podUID="1468035d-a3bd-4465-9fee-f27f6f7d4d7e" pod="openshift-marketplace/redhat-marketplace-r52z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r52z6\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:12 crc kubenswrapper[4632]: I1201 06:47:12.736466 4632 status_manager.go:851] "Failed to get status for pod" podUID="07f1738b-a57a-41f6-909f-0f830c165731" pod="openshift-marketplace/redhat-operators-t667l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t667l\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:12 crc kubenswrapper[4632]: I1201 06:47:12.749806 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:47:12 crc kubenswrapper[4632]: I1201 06:47:12.750411 4632 status_manager.go:851] "Failed to get status for pod" podUID="de997f32-b849-4dcd-96e7-de56cfa3ec3d" pod="openshift-marketplace/certified-operators-l4w7t" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-l4w7t\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:12 crc kubenswrapper[4632]: I1201 06:47:12.750672 4632 status_manager.go:851] "Failed to get status for pod" podUID="06aced22-ea91-4173-98a5-df8d91ebe64d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:12 crc kubenswrapper[4632]: I1201 06:47:12.750908 4632 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:12 crc kubenswrapper[4632]: I1201 06:47:12.751158 4632 status_manager.go:851] "Failed to get status for pod" podUID="2123ef98-d1db-4b71-8657-51be9c899a23" pod="openshift-marketplace/community-operators-mwjfp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-mwjfp\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:12 crc kubenswrapper[4632]: I1201 06:47:12.751396 4632 status_manager.go:851] "Failed to get status for pod" podUID="1468035d-a3bd-4465-9fee-f27f6f7d4d7e" pod="openshift-marketplace/redhat-marketplace-r52z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r52z6\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:12 crc kubenswrapper[4632]: I1201 06:47:12.752275 4632 status_manager.go:851] "Failed to get status for pod" podUID="07f1738b-a57a-41f6-909f-0f830c165731" pod="openshift-marketplace/redhat-operators-t667l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t667l\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:12 crc kubenswrapper[4632]: I1201 06:47:12.779314 4632 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e05a383f-53c3-4f52-9c31-1b73ff3122b6"
Dec 01 06:47:12 crc kubenswrapper[4632]: I1201 06:47:12.779335 4632 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e05a383f-53c3-4f52-9c31-1b73ff3122b6"
Dec 01 06:47:12 crc kubenswrapper[4632]: E1201 06:47:12.779615 4632 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.26.49:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:47:12 crc kubenswrapper[4632]: I1201 06:47:12.779888 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:47:12 crc kubenswrapper[4632]: W1201 06:47:12.818997 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-6114e43f3a1b819855ef81f9688720d9df155f94dfa09102c45c5a613ea90101 WatchSource:0}: Error finding container 6114e43f3a1b819855ef81f9688720d9df155f94dfa09102c45c5a613ea90101: Status 404 returned error can't find the container with id 6114e43f3a1b819855ef81f9688720d9df155f94dfa09102c45c5a613ea90101
Dec 01 06:47:13 crc kubenswrapper[4632]: I1201 06:47:13.740872 4632 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="57c7d7443704eabb09db766f921445f19c2e73392b1bea4b1657e924e34f1dcf" exitCode=0
Dec 01 06:47:13 crc kubenswrapper[4632]: I1201 06:47:13.740965 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"57c7d7443704eabb09db766f921445f19c2e73392b1bea4b1657e924e34f1dcf"}
Dec 01 06:47:13 crc kubenswrapper[4632]: I1201 06:47:13.741185 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"6114e43f3a1b819855ef81f9688720d9df155f94dfa09102c45c5a613ea90101"}
Dec 01 06:47:13 crc kubenswrapper[4632]: I1201 06:47:13.741489 4632 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e05a383f-53c3-4f52-9c31-1b73ff3122b6"
Dec 01 06:47:13 crc kubenswrapper[4632]: I1201 06:47:13.741503 4632 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e05a383f-53c3-4f52-9c31-1b73ff3122b6"
Dec 01 06:47:13 crc kubenswrapper[4632]: I1201 06:47:13.742114 4632 status_manager.go:851] "Failed to get status for pod" podUID="2123ef98-d1db-4b71-8657-51be9c899a23" pod="openshift-marketplace/community-operators-mwjfp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-mwjfp\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:13 crc kubenswrapper[4632]: E1201 06:47:13.742477 4632 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.26.49:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:47:13 crc kubenswrapper[4632]: I1201 06:47:13.742648 4632 status_manager.go:851] "Failed to get status for pod" podUID="1468035d-a3bd-4465-9fee-f27f6f7d4d7e" pod="openshift-marketplace/redhat-marketplace-r52z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r52z6\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:13 crc kubenswrapper[4632]: I1201 06:47:13.742949 4632 status_manager.go:851] "Failed to get status for pod" podUID="07f1738b-a57a-41f6-909f-0f830c165731" pod="openshift-marketplace/redhat-operators-t667l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t667l\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:13 crc kubenswrapper[4632]: I1201 06:47:13.743234 4632 status_manager.go:851] "Failed to get status for pod" podUID="de997f32-b849-4dcd-96e7-de56cfa3ec3d" pod="openshift-marketplace/certified-operators-l4w7t" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-l4w7t\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:13 crc kubenswrapper[4632]: I1201 06:47:13.743601 4632 status_manager.go:851] "Failed to get status for pod" podUID="06aced22-ea91-4173-98a5-df8d91ebe64d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:13 crc kubenswrapper[4632]: I1201 06:47:13.743871 4632 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:13 crc kubenswrapper[4632]: I1201 06:47:13.743941 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Dec 01 06:47:13 crc kubenswrapper[4632]: I1201 06:47:13.743993 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"193a499ee44ef95e84478d71a17574898b4ea0d92b967f06af52ae2817830791"}
Dec 01 06:47:13 crc kubenswrapper[4632]: I1201 06:47:13.744506 4632 status_manager.go:851] "Failed to get status for pod" podUID="06aced22-ea91-4173-98a5-df8d91ebe64d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:13 crc kubenswrapper[4632]: I1201 06:47:13.744817 4632 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:13 crc kubenswrapper[4632]: I1201 06:47:13.745013 4632 status_manager.go:851] "Failed to get status for pod" podUID="2123ef98-d1db-4b71-8657-51be9c899a23" pod="openshift-marketplace/community-operators-mwjfp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-mwjfp\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:13 crc kubenswrapper[4632]: I1201 06:47:13.745214 4632 status_manager.go:851] "Failed to get status for pod" podUID="1468035d-a3bd-4465-9fee-f27f6f7d4d7e" pod="openshift-marketplace/redhat-marketplace-r52z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r52z6\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:13 crc kubenswrapper[4632]: I1201 06:47:13.745457 4632 status_manager.go:851] "Failed to get status for pod" podUID="07f1738b-a57a-41f6-909f-0f830c165731" pod="openshift-marketplace/redhat-operators-t667l" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-t667l\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:13 crc kubenswrapper[4632]: I1201 06:47:13.745717 4632 status_manager.go:851] "Failed to get status for pod" podUID="de997f32-b849-4dcd-96e7-de56cfa3ec3d" pod="openshift-marketplace/certified-operators-l4w7t" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-l4w7t\": dial tcp 192.168.26.49:6443: connect: connection refused"
Dec 01 06:47:14 crc kubenswrapper[4632]: I1201 06:47:14.761678 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"d278989c42223a7f91eb5edc770a0546e52b35bd53e958308dfe4dbbbf948048"}
Dec 01 06:47:14 crc kubenswrapper[4632]: I1201 06:47:14.761973 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"ba9ef7ceb569db029e0251e1a238428d3ba623b57e12992f76ae7e257f2c2690"}
Dec 01 06:47:14 crc kubenswrapper[4632]: I1201 06:47:14.761984 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"5228cc19535dbb65d7550fc4c824246e2ed8ac2bf1a4c69ecd13f7e4e3f501f4"}
Dec 01 06:47:14 crc kubenswrapper[4632]: I1201 06:47:14.761993 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"800c9e703ce9f0b0d3cb4bf1c2e8edfdd42d1f4bc2a0d95bc9eef5fca9931e94"}
Dec 01 06:47:14 crc kubenswrapper[4632]: I1201 06:47:14.762001 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"67891286d3d31b503640ef3183d0caa8a100f56eb9e83d1992fca37c0682b419"}
Dec 01 06:47:14 crc kubenswrapper[4632]: I1201 06:47:14.762226 4632 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e05a383f-53c3-4f52-9c31-1b73ff3122b6"
Dec 01 06:47:14 crc kubenswrapper[4632]: I1201 06:47:14.762240 4632 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e05a383f-53c3-4f52-9c31-1b73ff3122b6"
Dec 01 06:47:14 crc kubenswrapper[4632]: I1201 06:47:14.762559 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:47:16 crc kubenswrapper[4632]: I1201 06:47:16.374116 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 01 06:47:17 crc kubenswrapper[4632]: I1201 06:47:17.780418 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:47:17 crc kubenswrapper[4632]: I1201 06:47:17.780463 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:47:17 crc kubenswrapper[4632]: I1201 06:47:17.785759 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:47:18 crc kubenswrapper[4632]: I1201 06:47:18.338737 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 01 06:47:18 crc kubenswrapper[4632]: I1201 06:47:18.342231 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 01 06:47:19 crc kubenswrapper[4632]: I1201 06:47:19.869154 4632 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:47:20 crc kubenswrapper[4632]: I1201 06:47:20.764275 4632 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="fdadc7a7-7473-4879-a469-51befddb27fa"
Dec 01 06:47:20 crc kubenswrapper[4632]: I1201 06:47:20.778898 4632 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e05a383f-53c3-4f52-9c31-1b73ff3122b6"
Dec 01 06:47:20 crc kubenswrapper[4632]: I1201 06:47:20.778924 4632 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e05a383f-53c3-4f52-9c31-1b73ff3122b6"
Dec 01 06:47:20 crc kubenswrapper[4632]: I1201 06:47:20.780364 4632 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="fdadc7a7-7473-4879-a469-51befddb27fa"
Dec 01 06:47:20 crc kubenswrapper[4632]: I1201 06:47:20.782786 4632 status_manager.go:308] "Container readiness changed before pod has synced" pod="openshift-kube-apiserver/kube-apiserver-crc" containerID="cri-o://67891286d3d31b503640ef3183d0caa8a100f56eb9e83d1992fca37c0682b419"
Dec 01 06:47:20 crc kubenswrapper[4632]: I1201 06:47:20.782816 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 01 06:47:21 crc kubenswrapper[4632]: I1201 06:47:21.784181 4632 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e05a383f-53c3-4f52-9c31-1b73ff3122b6"
Dec 01 06:47:21 crc kubenswrapper[4632]: I1201 06:47:21.784214 4632 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="e05a383f-53c3-4f52-9c31-1b73ff3122b6"
Dec 01 06:47:21 crc kubenswrapper[4632]: I1201 06:47:21.786968 4632 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="fdadc7a7-7473-4879-a469-51befddb27fa"
Dec 01 06:47:26 crc kubenswrapper[4632]: I1201 06:47:26.377063 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 01 06:47:29 crc kubenswrapper[4632]: I1201 06:47:29.166143 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Dec 01 06:47:29 crc kubenswrapper[4632]: I1201 06:47:29.842865 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Dec 01 06:47:30 crc kubenswrapper[4632]: I1201 06:47:30.372437 4632 reflector.go:368] Caches populated for *v1.Secret from
object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 01 06:47:30 crc kubenswrapper[4632]: I1201 06:47:30.571768 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 01 06:47:30 crc kubenswrapper[4632]: I1201 06:47:30.571798 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 01 06:47:30 crc kubenswrapper[4632]: I1201 06:47:30.778488 4632 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 01 06:47:30 crc kubenswrapper[4632]: I1201 06:47:30.848658 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 01 06:47:30 crc kubenswrapper[4632]: I1201 06:47:30.892800 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 01 06:47:31 crc kubenswrapper[4632]: I1201 06:47:31.082572 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 01 06:47:31 crc kubenswrapper[4632]: I1201 06:47:31.153473 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 01 06:47:31 crc kubenswrapper[4632]: I1201 06:47:31.266154 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 01 06:47:31 crc kubenswrapper[4632]: I1201 06:47:31.395460 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 01 06:47:31 crc kubenswrapper[4632]: I1201 06:47:31.700064 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 01 06:47:31 crc kubenswrapper[4632]: I1201 06:47:31.872088 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 01 06:47:31 crc kubenswrapper[4632]: I1201 06:47:31.923214 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 01 06:47:32 crc kubenswrapper[4632]: I1201 06:47:32.003296 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 01 06:47:32 crc kubenswrapper[4632]: I1201 06:47:32.120059 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 01 06:47:32 crc kubenswrapper[4632]: I1201 06:47:32.201227 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 01 06:47:32 crc kubenswrapper[4632]: I1201 06:47:32.616804 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 01 06:47:32 crc kubenswrapper[4632]: I1201 06:47:32.850916 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 01 06:47:32 crc kubenswrapper[4632]: I1201 06:47:32.907962 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 01 06:47:33 crc kubenswrapper[4632]: I1201 06:47:33.019195 4632 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 01 06:47:33 crc kubenswrapper[4632]: I1201 06:47:33.050871 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 01 06:47:33 crc kubenswrapper[4632]: I1201 06:47:33.073322 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 01 06:47:33 crc kubenswrapper[4632]: I1201 06:47:33.231237 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 01 06:47:33 crc kubenswrapper[4632]: I1201 06:47:33.304187 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 01 06:47:33 crc kubenswrapper[4632]: I1201 06:47:33.408787 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 01 06:47:33 crc kubenswrapper[4632]: I1201 06:47:33.438689 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 01 06:47:33 crc kubenswrapper[4632]: I1201 06:47:33.547461 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 01 06:47:33 crc kubenswrapper[4632]: I1201 06:47:33.655561 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 01 06:47:33 crc kubenswrapper[4632]: I1201 06:47:33.724797 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 01 06:47:33 crc kubenswrapper[4632]: I1201 06:47:33.788993 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 01 06:47:33 crc kubenswrapper[4632]: I1201 06:47:33.890020 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 01 06:47:33 crc kubenswrapper[4632]: I1201 06:47:33.937543 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 01 06:47:33 crc kubenswrapper[4632]: I1201 06:47:33.945128 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 01 06:47:33 crc kubenswrapper[4632]: I1201 06:47:33.974663 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 01 06:47:33 crc kubenswrapper[4632]: I1201 06:47:33.995214 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 01 06:47:34 crc kubenswrapper[4632]: I1201 06:47:34.033703 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 01 06:47:34 crc kubenswrapper[4632]: I1201 06:47:34.104634 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 01 06:47:34 crc kubenswrapper[4632]: I1201 06:47:34.106155 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 01 06:47:34 crc kubenswrapper[4632]: I1201 06:47:34.120443 4632 reflector.go:368] Caches populated for *v1.ConfigMap 
from object-"openshift-ingress"/"kube-root-ca.crt" Dec 01 06:47:34 crc kubenswrapper[4632]: I1201 06:47:34.125592 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 01 06:47:34 crc kubenswrapper[4632]: I1201 06:47:34.155243 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 01 06:47:34 crc kubenswrapper[4632]: I1201 06:47:34.205107 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 01 06:47:34 crc kubenswrapper[4632]: I1201 06:47:34.258781 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 01 06:47:34 crc kubenswrapper[4632]: I1201 06:47:34.342148 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 01 06:47:34 crc kubenswrapper[4632]: I1201 06:47:34.431252 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 01 06:47:34 crc kubenswrapper[4632]: I1201 06:47:34.514854 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 01 06:47:34 crc kubenswrapper[4632]: I1201 06:47:34.522775 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 01 06:47:34 crc kubenswrapper[4632]: I1201 06:47:34.534789 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 01 06:47:34 crc kubenswrapper[4632]: I1201 06:47:34.669662 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 01 06:47:34 crc kubenswrapper[4632]: I1201 06:47:34.801481 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 01 06:47:34 crc kubenswrapper[4632]: I1201 06:47:34.837014 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 01 06:47:34 crc kubenswrapper[4632]: I1201 06:47:34.862250 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 01 06:47:34 crc kubenswrapper[4632]: I1201 06:47:34.895162 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 01 06:47:34 crc kubenswrapper[4632]: I1201 06:47:34.941341 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 01 06:47:34 crc kubenswrapper[4632]: I1201 06:47:34.951197 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 01 06:47:35 crc kubenswrapper[4632]: I1201 06:47:35.049064 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 01 06:47:35 crc kubenswrapper[4632]: I1201 06:47:35.182405 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 01 06:47:35 crc kubenswrapper[4632]: I1201 06:47:35.384918 4632 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-dns"/"dns-default-metrics-tls" Dec 01 06:47:35 crc kubenswrapper[4632]: I1201 06:47:35.420905 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 01 06:47:35 crc kubenswrapper[4632]: I1201 06:47:35.454218 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 01 06:47:35 crc kubenswrapper[4632]: I1201 06:47:35.456288 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 01 06:47:35 crc kubenswrapper[4632]: I1201 06:47:35.515034 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 01 06:47:35 crc kubenswrapper[4632]: I1201 06:47:35.520412 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 01 06:47:35 crc kubenswrapper[4632]: I1201 06:47:35.599253 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 01 06:47:35 crc kubenswrapper[4632]: I1201 06:47:35.656395 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 01 06:47:35 crc kubenswrapper[4632]: I1201 06:47:35.707481 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 01 06:47:35 crc kubenswrapper[4632]: I1201 06:47:35.717862 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 01 06:47:35 crc kubenswrapper[4632]: I1201 06:47:35.757501 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 01 06:47:35 crc kubenswrapper[4632]: I1201 06:47:35.778044 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 01 06:47:35 crc kubenswrapper[4632]: I1201 06:47:35.828173 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 01 06:47:35 crc kubenswrapper[4632]: I1201 06:47:35.871229 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 01 06:47:35 crc kubenswrapper[4632]: I1201 06:47:35.875823 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 01 06:47:35 crc kubenswrapper[4632]: I1201 06:47:35.912464 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 01 06:47:35 crc kubenswrapper[4632]: I1201 06:47:35.968898 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 01 06:47:35 crc kubenswrapper[4632]: I1201 06:47:35.987817 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 01 06:47:36 crc kubenswrapper[4632]: I1201 06:47:36.048286 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 01 06:47:36 crc kubenswrapper[4632]: I1201 06:47:36.063550 4632 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 01 06:47:36 crc kubenswrapper[4632]: I1201 06:47:36.132002 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 01 06:47:36 crc kubenswrapper[4632]: I1201 06:47:36.148108 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 01 06:47:36 crc kubenswrapper[4632]: I1201 06:47:36.191010 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 01 06:47:36 crc kubenswrapper[4632]: I1201 06:47:36.191966 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 01 06:47:36 crc kubenswrapper[4632]: I1201 06:47:36.288605 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 01 06:47:36 crc kubenswrapper[4632]: I1201 06:47:36.310559 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 01 06:47:36 crc kubenswrapper[4632]: I1201 06:47:36.338210 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 01 06:47:36 crc kubenswrapper[4632]: I1201 06:47:36.436274 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 01 06:47:36 crc kubenswrapper[4632]: I1201 06:47:36.623833 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 01 06:47:36 crc kubenswrapper[4632]: I1201 06:47:36.773007 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 01 06:47:36 crc kubenswrapper[4632]: I1201 06:47:36.813002 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 01 06:47:36 crc kubenswrapper[4632]: I1201 06:47:36.882807 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 01 06:47:37 crc kubenswrapper[4632]: I1201 06:47:37.011341 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 01 06:47:37 crc kubenswrapper[4632]: I1201 06:47:37.068186 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 01 06:47:37 crc kubenswrapper[4632]: I1201 06:47:37.091453 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 01 06:47:37 crc kubenswrapper[4632]: I1201 06:47:37.120102 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 01 06:47:37 crc kubenswrapper[4632]: I1201 06:47:37.120216 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 01 06:47:37 crc kubenswrapper[4632]: I1201 06:47:37.142395 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 01 06:47:37 crc kubenswrapper[4632]: I1201 06:47:37.183566 4632 reflector.go:368] Caches populated for *v1.ConfigMap 
from object-"openshift-image-registry"/"trusted-ca" Dec 01 06:47:37 crc kubenswrapper[4632]: I1201 06:47:37.194842 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 01 06:47:37 crc kubenswrapper[4632]: I1201 06:47:37.198087 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 01 06:47:37 crc kubenswrapper[4632]: I1201 06:47:37.265719 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 01 06:47:37 crc kubenswrapper[4632]: I1201 06:47:37.318208 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 01 06:47:37 crc kubenswrapper[4632]: I1201 06:47:37.336733 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 01 06:47:37 crc kubenswrapper[4632]: I1201 06:47:37.439996 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 01 06:47:37 crc kubenswrapper[4632]: I1201 06:47:37.497158 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 01 06:47:37 crc kubenswrapper[4632]: I1201 06:47:37.570124 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 01 06:47:37 crc kubenswrapper[4632]: I1201 06:47:37.588151 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 01 06:47:37 crc kubenswrapper[4632]: I1201 06:47:37.622152 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 01 06:47:37 crc kubenswrapper[4632]: I1201 06:47:37.623436 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 01 06:47:37 crc kubenswrapper[4632]: I1201 06:47:37.819315 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 01 06:47:37 crc kubenswrapper[4632]: I1201 06:47:37.889482 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 01 06:47:37 crc kubenswrapper[4632]: I1201 06:47:37.980331 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 01 06:47:37 crc kubenswrapper[4632]: I1201 06:47:37.985807 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 01 06:47:37 crc kubenswrapper[4632]: I1201 06:47:37.990936 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 01 06:47:38 crc kubenswrapper[4632]: I1201 06:47:38.039853 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 01 06:47:38 crc kubenswrapper[4632]: I1201 06:47:38.055740 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 01 06:47:38 crc kubenswrapper[4632]: I1201 06:47:38.115721 4632 reflector.go:368] Caches populated for *v1.Secret 
from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 01 06:47:38 crc kubenswrapper[4632]: I1201 06:47:38.126081 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 01 06:47:38 crc kubenswrapper[4632]: I1201 06:47:38.160159 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 01 06:47:38 crc kubenswrapper[4632]: I1201 06:47:38.169518 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 01 06:47:38 crc kubenswrapper[4632]: I1201 06:47:38.189783 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 01 06:47:38 crc kubenswrapper[4632]: I1201 06:47:38.360118 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 01 06:47:38 crc kubenswrapper[4632]: I1201 06:47:38.403422 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 01 06:47:38 crc kubenswrapper[4632]: I1201 06:47:38.419053 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 01 06:47:38 crc kubenswrapper[4632]: I1201 06:47:38.462474 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 01 06:47:38 crc kubenswrapper[4632]: I1201 06:47:38.515899 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 01 06:47:38 crc kubenswrapper[4632]: I1201 06:47:38.526072 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 01 06:47:38 crc kubenswrapper[4632]: I1201 06:47:38.590046 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 01 06:47:38 crc kubenswrapper[4632]: I1201 06:47:38.597673 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 01 06:47:38 crc kubenswrapper[4632]: I1201 06:47:38.645938 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 01 06:47:38 crc kubenswrapper[4632]: I1201 06:47:38.708568 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 01 06:47:38 crc kubenswrapper[4632]: I1201 06:47:38.715982 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 01 06:47:38 crc kubenswrapper[4632]: I1201 06:47:38.731112 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 01 06:47:38 crc kubenswrapper[4632]: I1201 06:47:38.780714 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 01 06:47:38 crc kubenswrapper[4632]: I1201 06:47:38.804268 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 01 06:47:38 crc kubenswrapper[4632]: I1201 06:47:38.935960 4632 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-console"/"service-ca" Dec 01 06:47:38 crc kubenswrapper[4632]: I1201 06:47:38.991802 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 01 06:47:39 crc kubenswrapper[4632]: I1201 06:47:39.124610 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 01 06:47:39 crc kubenswrapper[4632]: I1201 06:47:39.234857 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 01 06:47:39 crc kubenswrapper[4632]: I1201 06:47:39.286169 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 01 06:47:39 crc kubenswrapper[4632]: I1201 06:47:39.295726 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 01 06:47:39 crc kubenswrapper[4632]: I1201 06:47:39.300669 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 01 06:47:39 crc kubenswrapper[4632]: I1201 06:47:39.340605 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 01 06:47:39 crc kubenswrapper[4632]: I1201 06:47:39.359432 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 01 06:47:39 crc kubenswrapper[4632]: I1201 06:47:39.378016 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 01 06:47:39 crc kubenswrapper[4632]: I1201 06:47:39.391112 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 01 06:47:39 crc kubenswrapper[4632]: I1201 06:47:39.457056 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 01 06:47:39 crc kubenswrapper[4632]: I1201 06:47:39.536340 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 01 06:47:39 crc kubenswrapper[4632]: I1201 06:47:39.539997 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 01 06:47:39 crc kubenswrapper[4632]: I1201 06:47:39.669109 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 01 06:47:39 crc kubenswrapper[4632]: I1201 06:47:39.743005 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 01 06:47:39 crc kubenswrapper[4632]: I1201 06:47:39.819602 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 01 06:47:39 crc kubenswrapper[4632]: I1201 06:47:39.839996 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 01 06:47:39 crc kubenswrapper[4632]: I1201 06:47:39.865594 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 01 06:47:39 crc kubenswrapper[4632]: I1201 06:47:39.878535 4632 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 01 06:47:39 crc kubenswrapper[4632]: I1201 06:47:39.925884 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 01 06:47:39 crc kubenswrapper[4632]: I1201 06:47:39.975755 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 01 06:47:40 crc kubenswrapper[4632]: I1201 06:47:40.056908 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 01 06:47:40 crc kubenswrapper[4632]: I1201 06:47:40.147861 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 01 06:47:40 crc kubenswrapper[4632]: I1201 06:47:40.289756 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 01 06:47:40 crc kubenswrapper[4632]: I1201 06:47:40.353424 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 01 06:47:40 crc kubenswrapper[4632]: I1201 06:47:40.395328 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 01 06:47:40 crc kubenswrapper[4632]: I1201 06:47:40.396820 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 01 06:47:40 crc kubenswrapper[4632]: I1201 06:47:40.447992 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 01 06:47:40 crc kubenswrapper[4632]: I1201 06:47:40.469126 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 01 06:47:40 crc kubenswrapper[4632]: I1201 06:47:40.488426 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 01 06:47:40 crc kubenswrapper[4632]: I1201 06:47:40.603414 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 01 06:47:40 crc kubenswrapper[4632]: I1201 06:47:40.657866 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 01 06:47:40 crc kubenswrapper[4632]: I1201 06:47:40.907223 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 01 06:47:41 crc kubenswrapper[4632]: I1201 06:47:41.015221 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 01 06:47:41 crc kubenswrapper[4632]: I1201 06:47:41.045660 4632 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 01 06:47:41 crc kubenswrapper[4632]: I1201 06:47:41.046458 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 01 06:47:41 crc kubenswrapper[4632]: I1201 06:47:41.113398 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 01 06:47:41 crc 
kubenswrapper[4632]: I1201 06:47:41.124215 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 01 06:47:41 crc kubenswrapper[4632]: I1201 06:47:41.145675 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 01 06:47:41 crc kubenswrapper[4632]: I1201 06:47:41.153159 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 01 06:47:41 crc kubenswrapper[4632]: I1201 06:47:41.186671 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 01 06:47:41 crc kubenswrapper[4632]: I1201 06:47:41.239931 4632 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 01 06:47:41 crc kubenswrapper[4632]: I1201 06:47:41.242552 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-r52z6" podStartSLOduration=43.625642672 podStartE2EDuration="46.242537513s" podCreationTimestamp="2025-12-01 06:46:55 +0000 UTC" firstStartedPulling="2025-12-01 06:46:56.618731973 +0000 UTC m=+226.183744947" lastFinishedPulling="2025-12-01 06:46:59.235626815 +0000 UTC m=+228.800639788" observedRunningTime="2025-12-01 06:47:19.897169835 +0000 UTC m=+249.462182809" watchObservedRunningTime="2025-12-01 06:47:41.242537513 +0000 UTC m=+270.807550476" Dec 01 06:47:41 crc kubenswrapper[4632]: I1201 06:47:41.242992 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-t667l" podStartSLOduration=42.687031167 podStartE2EDuration="46.242989035s" podCreationTimestamp="2025-12-01 06:46:55 +0000 UTC" firstStartedPulling="2025-12-01 06:46:56.615038171 +0000 UTC m=+226.180051144" lastFinishedPulling="2025-12-01 06:47:00.170996039 +0000 UTC m=+229.736009012" observedRunningTime="2025-12-01 06:47:19.909599409 +0000 UTC m=+249.474612382" watchObservedRunningTime="2025-12-01 06:47:41.242989035 +0000 UTC m=+270.808002009" Dec 01 06:47:41 crc kubenswrapper[4632]: I1201 06:47:41.243623 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 01 06:47:41 crc kubenswrapper[4632]: I1201 06:47:41.243679 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 01 06:47:41 crc kubenswrapper[4632]: I1201 06:47:41.247291 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 01 06:47:41 crc kubenswrapper[4632]: I1201 06:47:41.257977 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=22.257961633 podStartE2EDuration="22.257961633s" podCreationTimestamp="2025-12-01 06:47:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:47:41.257425951 +0000 UTC m=+270.822438925" watchObservedRunningTime="2025-12-01 06:47:41.257961633 +0000 UTC m=+270.822974606" Dec 01 06:47:41 crc kubenswrapper[4632]: I1201 06:47:41.293003 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 01 06:47:41 crc kubenswrapper[4632]: I1201 06:47:41.410045 4632 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 01 06:47:41 crc kubenswrapper[4632]: I1201 06:47:41.628708 4632 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 01 06:47:41 crc kubenswrapper[4632]: I1201 06:47:41.636099 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 01 06:47:41 crc kubenswrapper[4632]: I1201 06:47:41.643689 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 01 06:47:41 crc kubenswrapper[4632]: I1201 06:47:41.832678 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 01 06:47:41 crc kubenswrapper[4632]: I1201 06:47:41.902174 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 01 06:47:41 crc kubenswrapper[4632]: I1201 06:47:41.913855 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 01 06:47:41 crc kubenswrapper[4632]: I1201 06:47:41.968255 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 01 06:47:41 crc kubenswrapper[4632]: I1201 06:47:41.985648 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 01 06:47:42 crc kubenswrapper[4632]: I1201 06:47:42.044527 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 01 06:47:42 crc kubenswrapper[4632]: I1201 06:47:42.052554 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 01 06:47:42 crc kubenswrapper[4632]: I1201 06:47:42.053638 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 01 06:47:42 crc kubenswrapper[4632]: I1201 06:47:42.123869 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 01 06:47:42 crc kubenswrapper[4632]: I1201 06:47:42.182298 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 01 06:47:42 crc kubenswrapper[4632]: I1201 06:47:42.206252 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 01 06:47:42 crc kubenswrapper[4632]: I1201 06:47:42.263265 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 01 06:47:42 crc kubenswrapper[4632]: I1201 06:47:42.269572 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 01 06:47:42 crc kubenswrapper[4632]: I1201 06:47:42.297514 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 01 06:47:42 crc kubenswrapper[4632]: I1201 06:47:42.395015 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 01 06:47:42 crc kubenswrapper[4632]: I1201 06:47:42.396919 4632 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 01 06:47:42 crc kubenswrapper[4632]: I1201 06:47:42.495037 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 01 06:47:42 crc kubenswrapper[4632]: I1201 06:47:42.515285 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 01 06:47:42 crc kubenswrapper[4632]: I1201 06:47:42.527371 4632 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 01 06:47:42 crc kubenswrapper[4632]: I1201 06:47:42.527638 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://6d3ccbaa1e24dd3fe69b924367e913ced7844c74be2cad892be8a6bac55dc744" gracePeriod=5 Dec 01 06:47:42 crc kubenswrapper[4632]: I1201 06:47:42.545298 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 01 06:47:42 crc kubenswrapper[4632]: I1201 06:47:42.566218 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 01 06:47:42 crc kubenswrapper[4632]: I1201 06:47:42.788467 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 01 06:47:42 crc kubenswrapper[4632]: I1201 06:47:42.885931 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 01 06:47:42 crc kubenswrapper[4632]: I1201 06:47:42.919707 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 01 06:47:42 crc kubenswrapper[4632]: I1201 06:47:42.975484 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 01 06:47:42 crc kubenswrapper[4632]: I1201 06:47:42.988934 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 01 06:47:43 crc kubenswrapper[4632]: I1201 06:47:43.133452 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 01 06:47:43 crc kubenswrapper[4632]: I1201 06:47:43.164032 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 01 06:47:43 crc kubenswrapper[4632]: I1201 06:47:43.177928 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 01 06:47:43 crc kubenswrapper[4632]: I1201 06:47:43.195563 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 01 06:47:43 crc kubenswrapper[4632]: I1201 06:47:43.199043 4632 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 01 06:47:43 crc kubenswrapper[4632]: I1201 06:47:43.265120 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 01 06:47:43 crc kubenswrapper[4632]: I1201 06:47:43.306541 4632 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 01 06:47:43 crc kubenswrapper[4632]: I1201 06:47:43.576884 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 01 06:47:43 crc kubenswrapper[4632]: I1201 06:47:43.598649 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 01 06:47:43 crc kubenswrapper[4632]: I1201 06:47:43.644422 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 01 06:47:43 crc kubenswrapper[4632]: I1201 06:47:43.687533 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 01 06:47:43 crc kubenswrapper[4632]: I1201 06:47:43.709550 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 01 06:47:43 crc kubenswrapper[4632]: I1201 06:47:43.738774 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 01 06:47:43 crc kubenswrapper[4632]: I1201 06:47:43.782192 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 01 06:47:43 crc kubenswrapper[4632]: I1201 06:47:43.840093 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 01 06:47:43 crc kubenswrapper[4632]: I1201 06:47:43.850462 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 01 06:47:44 crc kubenswrapper[4632]: I1201 06:47:44.006612 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 01 06:47:44 crc kubenswrapper[4632]: I1201 06:47:44.047782 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 01 06:47:44 crc kubenswrapper[4632]: I1201 06:47:44.084853 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 01 06:47:44 crc kubenswrapper[4632]: I1201 06:47:44.184110 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 01 06:47:44 crc kubenswrapper[4632]: I1201 06:47:44.539557 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 01 06:47:44 crc kubenswrapper[4632]: I1201 06:47:44.549818 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 01 06:47:44 crc kubenswrapper[4632]: I1201 06:47:44.722029 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 01 06:47:44 crc kubenswrapper[4632]: I1201 06:47:44.844411 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 01 06:47:44 crc kubenswrapper[4632]: I1201 06:47:44.874705 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 01 06:47:45 crc 
kubenswrapper[4632]: I1201 06:47:45.011013 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Dec 01 06:47:45 crc kubenswrapper[4632]: I1201 06:47:45.203781 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Dec 01 06:47:45 crc kubenswrapper[4632]: I1201 06:47:45.235907 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Dec 01 06:47:45 crc kubenswrapper[4632]: I1201 06:47:45.288693 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Dec 01 06:47:45 crc kubenswrapper[4632]: I1201 06:47:45.613609 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Dec 01 06:47:45 crc kubenswrapper[4632]: I1201 06:47:45.651165 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Dec 01 06:47:45 crc kubenswrapper[4632]: I1201 06:47:45.946959 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Dec 01 06:47:46 crc kubenswrapper[4632]: I1201 06:47:46.058708 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Dec 01 06:47:46 crc kubenswrapper[4632]: I1201 06:47:46.098841 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Dec 01 06:47:46 crc kubenswrapper[4632]: I1201 06:47:46.386879 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Dec 01 06:47:46 crc kubenswrapper[4632]: I1201 06:47:46.855108 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Dec 01 06:47:47 crc kubenswrapper[4632]: I1201 06:47:47.149859 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Dec 01 06:47:47 crc kubenswrapper[4632]: I1201 06:47:47.243778 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Dec 01 06:47:47 crc kubenswrapper[4632]: I1201 06:47:47.509562 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Dec 01 06:47:47 crc kubenswrapper[4632]: I1201 06:47:47.693226 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Dec 01 06:47:47 crc kubenswrapper[4632]: I1201 06:47:47.774417 4632 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Dec 01 06:47:47 crc kubenswrapper[4632]: I1201 06:47:47.893916 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib"
Dec 01 06:47:47 crc kubenswrapper[4632]: I1201 06:47:47.903271 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Dec 01 06:47:47 crc kubenswrapper[4632]: I1201 06:47:47.903322 4632 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="6d3ccbaa1e24dd3fe69b924367e913ced7844c74be2cad892be8a6bac55dc744" exitCode=137
Dec 01 06:47:47 crc kubenswrapper[4632]: I1201 06:47:47.967949 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Dec 01 06:47:48 crc kubenswrapper[4632]: I1201 06:47:48.078072 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Dec 01 06:47:48 crc kubenswrapper[4632]: I1201 06:47:48.078137 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 01 06:47:48 crc kubenswrapper[4632]: I1201 06:47:48.140133 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 01 06:47:48 crc kubenswrapper[4632]: I1201 06:47:48.140185 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 01 06:47:48 crc kubenswrapper[4632]: I1201 06:47:48.140220 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 01 06:47:48 crc kubenswrapper[4632]: I1201 06:47:48.140262 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 01 06:47:48 crc kubenswrapper[4632]: I1201 06:47:48.140289 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") "
Dec 01 06:47:48 crc kubenswrapper[4632]: I1201 06:47:48.140295 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 06:47:48 crc kubenswrapper[4632]: I1201 06:47:48.140329 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 06:47:48 crc kubenswrapper[4632]: I1201 06:47:48.140404 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 06:47:48 crc kubenswrapper[4632]: I1201 06:47:48.140515 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 06:47:48 crc kubenswrapper[4632]: I1201 06:47:48.140657 4632 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\""
Dec 01 06:47:48 crc kubenswrapper[4632]: I1201 06:47:48.140679 4632 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\""
Dec 01 06:47:48 crc kubenswrapper[4632]: I1201 06:47:48.140697 4632 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\""
Dec 01 06:47:48 crc kubenswrapper[4632]: I1201 06:47:48.140715 4632 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 01 06:47:48 crc kubenswrapper[4632]: I1201 06:47:48.146990 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 06:47:48 crc kubenswrapper[4632]: I1201 06:47:48.242078 4632 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\""
Dec 01 06:47:48 crc kubenswrapper[4632]: I1201 06:47:48.524137 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Dec 01 06:47:48 crc kubenswrapper[4632]: I1201 06:47:48.754991 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes"
Dec 01 06:47:48 crc kubenswrapper[4632]: I1201 06:47:48.909138 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log"
Dec 01 06:47:48 crc kubenswrapper[4632]: I1201 06:47:48.909209 4632 scope.go:117] "RemoveContainer" containerID="6d3ccbaa1e24dd3fe69b924367e913ced7844c74be2cad892be8a6bac55dc744"
Dec 01 06:47:48 crc kubenswrapper[4632]: I1201 06:47:48.909280 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" Dec 01 06:48:29 crc kubenswrapper[4632]: I1201 06:48:29.008233 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-27qzb"] Dec 01 06:48:29 crc kubenswrapper[4632]: I1201 06:48:29.196221 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-27qzb\" (UID: \"18d837e3-87b3-4d23-89b0-e71b25762d56\") " pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" Dec 01 06:48:29 crc kubenswrapper[4632]: I1201 06:48:29.196303 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/18d837e3-87b3-4d23-89b0-e71b25762d56-trusted-ca\") pod \"image-registry-66df7c8f76-27qzb\" (UID: \"18d837e3-87b3-4d23-89b0-e71b25762d56\") " pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" Dec 01 06:48:29 crc kubenswrapper[4632]: I1201 06:48:29.196330 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqrd5\" (UniqueName: \"kubernetes.io/projected/18d837e3-87b3-4d23-89b0-e71b25762d56-kube-api-access-sqrd5\") pod \"image-registry-66df7c8f76-27qzb\" (UID: \"18d837e3-87b3-4d23-89b0-e71b25762d56\") " pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" Dec 01 06:48:29 crc kubenswrapper[4632]: I1201 06:48:29.196398 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/18d837e3-87b3-4d23-89b0-e71b25762d56-registry-tls\") pod \"image-registry-66df7c8f76-27qzb\" (UID: \"18d837e3-87b3-4d23-89b0-e71b25762d56\") " pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" Dec 01 06:48:29 crc kubenswrapper[4632]: I1201 06:48:29.196557 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/18d837e3-87b3-4d23-89b0-e71b25762d56-registry-certificates\") pod \"image-registry-66df7c8f76-27qzb\" (UID: \"18d837e3-87b3-4d23-89b0-e71b25762d56\") " pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" Dec 01 06:48:29 crc kubenswrapper[4632]: I1201 06:48:29.196621 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/18d837e3-87b3-4d23-89b0-e71b25762d56-bound-sa-token\") pod \"image-registry-66df7c8f76-27qzb\" (UID: \"18d837e3-87b3-4d23-89b0-e71b25762d56\") " pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" Dec 01 06:48:29 crc kubenswrapper[4632]: I1201 06:48:29.196646 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/18d837e3-87b3-4d23-89b0-e71b25762d56-ca-trust-extracted\") pod \"image-registry-66df7c8f76-27qzb\" (UID: \"18d837e3-87b3-4d23-89b0-e71b25762d56\") " pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" Dec 01 06:48:29 crc kubenswrapper[4632]: I1201 06:48:29.196701 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: 
\"kubernetes.io/secret/18d837e3-87b3-4d23-89b0-e71b25762d56-installation-pull-secrets\") pod \"image-registry-66df7c8f76-27qzb\" (UID: \"18d837e3-87b3-4d23-89b0-e71b25762d56\") " pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" Dec 01 06:48:29 crc kubenswrapper[4632]: I1201 06:48:29.214876 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-27qzb\" (UID: \"18d837e3-87b3-4d23-89b0-e71b25762d56\") " pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" Dec 01 06:48:29 crc kubenswrapper[4632]: I1201 06:48:29.297469 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/18d837e3-87b3-4d23-89b0-e71b25762d56-trusted-ca\") pod \"image-registry-66df7c8f76-27qzb\" (UID: \"18d837e3-87b3-4d23-89b0-e71b25762d56\") " pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" Dec 01 06:48:29 crc kubenswrapper[4632]: I1201 06:48:29.297521 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqrd5\" (UniqueName: \"kubernetes.io/projected/18d837e3-87b3-4d23-89b0-e71b25762d56-kube-api-access-sqrd5\") pod \"image-registry-66df7c8f76-27qzb\" (UID: \"18d837e3-87b3-4d23-89b0-e71b25762d56\") " pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" Dec 01 06:48:29 crc kubenswrapper[4632]: I1201 06:48:29.297556 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/18d837e3-87b3-4d23-89b0-e71b25762d56-registry-tls\") pod \"image-registry-66df7c8f76-27qzb\" (UID: \"18d837e3-87b3-4d23-89b0-e71b25762d56\") " pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" Dec 01 06:48:29 crc kubenswrapper[4632]: I1201 06:48:29.297573 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/18d837e3-87b3-4d23-89b0-e71b25762d56-registry-certificates\") pod \"image-registry-66df7c8f76-27qzb\" (UID: \"18d837e3-87b3-4d23-89b0-e71b25762d56\") " pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" Dec 01 06:48:29 crc kubenswrapper[4632]: I1201 06:48:29.297593 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/18d837e3-87b3-4d23-89b0-e71b25762d56-bound-sa-token\") pod \"image-registry-66df7c8f76-27qzb\" (UID: \"18d837e3-87b3-4d23-89b0-e71b25762d56\") " pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" Dec 01 06:48:29 crc kubenswrapper[4632]: I1201 06:48:29.297610 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/18d837e3-87b3-4d23-89b0-e71b25762d56-ca-trust-extracted\") pod \"image-registry-66df7c8f76-27qzb\" (UID: \"18d837e3-87b3-4d23-89b0-e71b25762d56\") " pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" Dec 01 06:48:29 crc kubenswrapper[4632]: I1201 06:48:29.297632 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/18d837e3-87b3-4d23-89b0-e71b25762d56-installation-pull-secrets\") pod \"image-registry-66df7c8f76-27qzb\" (UID: \"18d837e3-87b3-4d23-89b0-e71b25762d56\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" Dec 01 06:48:29 crc kubenswrapper[4632]: I1201 06:48:29.298224 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/18d837e3-87b3-4d23-89b0-e71b25762d56-ca-trust-extracted\") pod \"image-registry-66df7c8f76-27qzb\" (UID: \"18d837e3-87b3-4d23-89b0-e71b25762d56\") " pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" Dec 01 06:48:29 crc kubenswrapper[4632]: I1201 06:48:29.298618 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/18d837e3-87b3-4d23-89b0-e71b25762d56-trusted-ca\") pod \"image-registry-66df7c8f76-27qzb\" (UID: \"18d837e3-87b3-4d23-89b0-e71b25762d56\") " pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" Dec 01 06:48:29 crc kubenswrapper[4632]: I1201 06:48:29.298754 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/18d837e3-87b3-4d23-89b0-e71b25762d56-registry-certificates\") pod \"image-registry-66df7c8f76-27qzb\" (UID: \"18d837e3-87b3-4d23-89b0-e71b25762d56\") " pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" Dec 01 06:48:29 crc kubenswrapper[4632]: I1201 06:48:29.302378 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/18d837e3-87b3-4d23-89b0-e71b25762d56-registry-tls\") pod \"image-registry-66df7c8f76-27qzb\" (UID: \"18d837e3-87b3-4d23-89b0-e71b25762d56\") " pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" Dec 01 06:48:29 crc kubenswrapper[4632]: I1201 06:48:29.303759 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/18d837e3-87b3-4d23-89b0-e71b25762d56-installation-pull-secrets\") pod \"image-registry-66df7c8f76-27qzb\" (UID: \"18d837e3-87b3-4d23-89b0-e71b25762d56\") " pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" Dec 01 06:48:29 crc kubenswrapper[4632]: I1201 06:48:29.310433 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqrd5\" (UniqueName: \"kubernetes.io/projected/18d837e3-87b3-4d23-89b0-e71b25762d56-kube-api-access-sqrd5\") pod \"image-registry-66df7c8f76-27qzb\" (UID: \"18d837e3-87b3-4d23-89b0-e71b25762d56\") " pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" Dec 01 06:48:29 crc kubenswrapper[4632]: I1201 06:48:29.312685 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/18d837e3-87b3-4d23-89b0-e71b25762d56-bound-sa-token\") pod \"image-registry-66df7c8f76-27qzb\" (UID: \"18d837e3-87b3-4d23-89b0-e71b25762d56\") " pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" Dec 01 06:48:29 crc kubenswrapper[4632]: I1201 06:48:29.611213 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" Dec 01 06:48:29 crc kubenswrapper[4632]: I1201 06:48:29.944011 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-27qzb"] Dec 01 06:48:30 crc kubenswrapper[4632]: I1201 06:48:30.080191 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" event={"ID":"18d837e3-87b3-4d23-89b0-e71b25762d56","Type":"ContainerStarted","Data":"6881b910343b1a256574e93c9b233b1f99a69c214258fccfb16806cbf0b61301"} Dec 01 06:48:30 crc kubenswrapper[4632]: I1201 06:48:30.080504 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" event={"ID":"18d837e3-87b3-4d23-89b0-e71b25762d56","Type":"ContainerStarted","Data":"fd55a9636cd94751e65b3f35f57ac16258215d791261c2e6989d27755266097f"} Dec 01 06:48:30 crc kubenswrapper[4632]: I1201 06:48:30.081102 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" Dec 01 06:48:30 crc kubenswrapper[4632]: I1201 06:48:30.094248 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" podStartSLOduration=2.09423607 podStartE2EDuration="2.09423607s" podCreationTimestamp="2025-12-01 06:48:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:48:30.093385553 +0000 UTC m=+319.658398526" watchObservedRunningTime="2025-12-01 06:48:30.09423607 +0000 UTC m=+319.659249042" Dec 01 06:48:49 crc kubenswrapper[4632]: I1201 06:48:49.497702 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 06:48:49 crc kubenswrapper[4632]: I1201 06:48:49.498121 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 06:48:49 crc kubenswrapper[4632]: I1201 06:48:49.616909 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-27qzb" Dec 01 06:48:49 crc kubenswrapper[4632]: I1201 06:48:49.651238 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-w2js2"] Dec 01 06:49:14 crc kubenswrapper[4632]: I1201 06:49:14.676169 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" podUID="7cd355d1-fe1f-4c1d-81fa-280d03a6c78a" containerName="registry" containerID="cri-o://0e9afa32116beede97444bfc1ea8094edb35526f1ba8942355838ddaccce7782" gracePeriod=30 Dec 01 06:49:14 crc kubenswrapper[4632]: I1201 06:49:14.934533 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.019145 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-bound-sa-token\") pod \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.019217 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-trusted-ca\") pod \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.019346 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.019403 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-installation-pull-secrets\") pod \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.019443 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-registry-tls\") pod \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.019470 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rh9d7\" (UniqueName: \"kubernetes.io/projected/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-kube-api-access-rh9d7\") pod \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.019495 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-registry-certificates\") pod \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.019531 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-ca-trust-extracted\") pod \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\" (UID: \"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a\") " Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.020043 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.020301 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.020548 4632 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.020570 4632 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.023951 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.024187 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.024761 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-kube-api-access-rh9d7" (OuterVolumeSpecName: "kube-api-access-rh9d7") pod "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a"). InnerVolumeSpecName "kube-api-access-rh9d7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.024850 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.026411 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.072427 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a" (UID: "7cd355d1-fe1f-4c1d-81fa-280d03a6c78a"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.121723 4632 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.121745 4632 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.121758 4632 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.121767 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rh9d7\" (UniqueName: \"kubernetes.io/projected/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-kube-api-access-rh9d7\") on node \"crc\" DevicePath \"\"" Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.121774 4632 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.244682 4632 generic.go:334] "Generic (PLEG): container finished" podID="7cd355d1-fe1f-4c1d-81fa-280d03a6c78a" containerID="0e9afa32116beede97444bfc1ea8094edb35526f1ba8942355838ddaccce7782" exitCode=0 Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.244734 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.244752 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" event={"ID":"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a","Type":"ContainerDied","Data":"0e9afa32116beede97444bfc1ea8094edb35526f1ba8942355838ddaccce7782"} Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.245247 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-w2js2" event={"ID":"7cd355d1-fe1f-4c1d-81fa-280d03a6c78a","Type":"ContainerDied","Data":"b008e23df9addff912e0ee8cdaf50a618c09be9222a9aa0e62ed2bae6ef9f683"} Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.245268 4632 scope.go:117] "RemoveContainer" containerID="0e9afa32116beede97444bfc1ea8094edb35526f1ba8942355838ddaccce7782" Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.259302 4632 scope.go:117] "RemoveContainer" containerID="0e9afa32116beede97444bfc1ea8094edb35526f1ba8942355838ddaccce7782" Dec 01 06:49:15 crc kubenswrapper[4632]: E1201 06:49:15.259638 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e9afa32116beede97444bfc1ea8094edb35526f1ba8942355838ddaccce7782\": container with ID starting with 0e9afa32116beede97444bfc1ea8094edb35526f1ba8942355838ddaccce7782 not found: ID does not exist" containerID="0e9afa32116beede97444bfc1ea8094edb35526f1ba8942355838ddaccce7782" Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.259663 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e9afa32116beede97444bfc1ea8094edb35526f1ba8942355838ddaccce7782"} err="failed to get container status \"0e9afa32116beede97444bfc1ea8094edb35526f1ba8942355838ddaccce7782\": rpc error: code = NotFound desc = could not find container \"0e9afa32116beede97444bfc1ea8094edb35526f1ba8942355838ddaccce7782\": container with ID starting with 0e9afa32116beede97444bfc1ea8094edb35526f1ba8942355838ddaccce7782 not found: ID does not exist" Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.266697 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-w2js2"] Dec 01 06:49:15 crc kubenswrapper[4632]: I1201 06:49:15.270892 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-w2js2"] Dec 01 06:49:16 crc kubenswrapper[4632]: I1201 06:49:16.754724 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7cd355d1-fe1f-4c1d-81fa-280d03a6c78a" path="/var/lib/kubelet/pods/7cd355d1-fe1f-4c1d-81fa-280d03a6c78a/volumes" Dec 01 06:49:19 crc kubenswrapper[4632]: I1201 06:49:19.498444 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 06:49:19 crc kubenswrapper[4632]: I1201 06:49:19.498494 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 06:49:49 crc 
Dec 01 06:49:49 crc kubenswrapper[4632]: I1201 06:49:49.497648 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 06:49:49 crc kubenswrapper[4632]: I1201 06:49:49.498089 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 06:49:49 crc kubenswrapper[4632]: I1201 06:49:49.498135 4632 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs"
Dec 01 06:49:49 crc kubenswrapper[4632]: I1201 06:49:49.498584 4632 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b2e0f53c88b4c1d159446108cc543bcd91eef18bfb8a923bef5d472620c42a5a"} pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 01 06:49:49 crc kubenswrapper[4632]: I1201 06:49:49.498638 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" containerID="cri-o://b2e0f53c88b4c1d159446108cc543bcd91eef18bfb8a923bef5d472620c42a5a" gracePeriod=600
Dec 01 06:49:50 crc kubenswrapper[4632]: I1201 06:49:50.369321 4632 generic.go:334] "Generic (PLEG): container finished" podID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerID="b2e0f53c88b4c1d159446108cc543bcd91eef18bfb8a923bef5d472620c42a5a" exitCode=0
Dec 01 06:49:50 crc kubenswrapper[4632]: I1201 06:49:50.369698 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerDied","Data":"b2e0f53c88b4c1d159446108cc543bcd91eef18bfb8a923bef5d472620c42a5a"}
Dec 01 06:49:50 crc kubenswrapper[4632]: I1201 06:49:50.369724 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerStarted","Data":"ee7f539e03195ed0ab8d1a700f3bda3f809d44f758cd7a5f013919000dd76f35"}
Dec 01 06:49:50 crc kubenswrapper[4632]: I1201 06:49:50.369740 4632 scope.go:117] "RemoveContainer" containerID="1080839a72384cf67a5de1675fdc4d576e23dbc64d78d158343760be897892b9"
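The sequence above is the full liveness-probe restart cycle: HTTP probes against 127.0.0.1:8798/health fail roughly 30 seconds apart (06:48:49, 06:49:19, 06:49:49), the kubelet declares the container unhealthy, kills it with its termination grace period, and starts a replacement. A sketch of a probe spec that would behave this way; only the path and port appear in the log, and the timing fields below are assumptions read off the ~30s failure spacing:

// Illustrative liveness probe matching the entries above (not the MCO's actual manifest).
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/intstr"
)

func main() {
	probe := corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			HTTPGet: &corev1.HTTPGetAction{
				Path: "/health",
				Port: intstr.FromInt(8798), // the kubelet probes the pod directly, hence 127.0.0.1 for a host-network daemon
			},
		},
		PeriodSeconds:    30, // assumption: consistent with the 30s spacing of the failures
		FailureThreshold: 3,  // assumption: restart follows the third consecutive failure
	}
	fmt.Printf("liveness: GET :%s%s every %ds, restart after %d failures\n",
		probe.HTTPGet.Port.String(), probe.HTTPGet.Path, probe.PeriodSeconds, probe.FailureThreshold)
}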
podUID="7cd355d1-fe1f-4c1d-81fa-280d03a6c78a" containerName="registry" Dec 01 06:51:22 crc kubenswrapper[4632]: I1201 06:51:22.966724 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-jmgck" Dec 01 06:51:22 crc kubenswrapper[4632]: I1201 06:51:22.969242 4632 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-ccmzr" Dec 01 06:51:22 crc kubenswrapper[4632]: I1201 06:51:22.969319 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 01 06:51:22 crc kubenswrapper[4632]: I1201 06:51:22.969377 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 01 06:51:22 crc kubenswrapper[4632]: I1201 06:51:22.974296 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-dntsb"] Dec 01 06:51:22 crc kubenswrapper[4632]: I1201 06:51:22.974977 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-dntsb" Dec 01 06:51:22 crc kubenswrapper[4632]: I1201 06:51:22.976613 4632 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-vqgfd" Dec 01 06:51:22 crc kubenswrapper[4632]: I1201 06:51:22.977262 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-jmgck"] Dec 01 06:51:22 crc kubenswrapper[4632]: I1201 06:51:22.980157 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-wjfb2"] Dec 01 06:51:22 crc kubenswrapper[4632]: I1201 06:51:22.980693 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-wjfb2" Dec 01 06:51:22 crc kubenswrapper[4632]: I1201 06:51:22.981977 4632 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-tpz25" Dec 01 06:51:22 crc kubenswrapper[4632]: I1201 06:51:22.987329 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-dntsb"] Dec 01 06:51:22 crc kubenswrapper[4632]: I1201 06:51:22.991440 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-wjfb2"] Dec 01 06:51:22 crc kubenswrapper[4632]: I1201 06:51:22.993814 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8q99q\" (UniqueName: \"kubernetes.io/projected/05af64fb-260a-44bb-a7e8-e3b8ffbee656-kube-api-access-8q99q\") pod \"cert-manager-webhook-5655c58dd6-wjfb2\" (UID: \"05af64fb-260a-44bb-a7e8-e3b8ffbee656\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-wjfb2" Dec 01 06:51:22 crc kubenswrapper[4632]: I1201 06:51:22.993855 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lg7mn\" (UniqueName: \"kubernetes.io/projected/15ee763c-142f-480b-92b5-6360ed211e21-kube-api-access-lg7mn\") pod \"cert-manager-5b446d88c5-dntsb\" (UID: \"15ee763c-142f-480b-92b5-6360ed211e21\") " pod="cert-manager/cert-manager-5b446d88c5-dntsb" Dec 01 06:51:22 crc kubenswrapper[4632]: I1201 06:51:22.993883 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdsll\" (UniqueName: \"kubernetes.io/projected/91c9fab4-eb3d-4b68-bf2b-31d5f17c6c0a-kube-api-access-mdsll\") pod \"cert-manager-cainjector-7f985d654d-jmgck\" (UID: \"91c9fab4-eb3d-4b68-bf2b-31d5f17c6c0a\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-jmgck" Dec 01 06:51:23 crc kubenswrapper[4632]: I1201 06:51:23.094457 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8q99q\" (UniqueName: \"kubernetes.io/projected/05af64fb-260a-44bb-a7e8-e3b8ffbee656-kube-api-access-8q99q\") pod \"cert-manager-webhook-5655c58dd6-wjfb2\" (UID: \"05af64fb-260a-44bb-a7e8-e3b8ffbee656\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-wjfb2" Dec 01 06:51:23 crc kubenswrapper[4632]: I1201 06:51:23.094502 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lg7mn\" (UniqueName: \"kubernetes.io/projected/15ee763c-142f-480b-92b5-6360ed211e21-kube-api-access-lg7mn\") pod \"cert-manager-5b446d88c5-dntsb\" (UID: \"15ee763c-142f-480b-92b5-6360ed211e21\") " pod="cert-manager/cert-manager-5b446d88c5-dntsb" Dec 01 06:51:23 crc kubenswrapper[4632]: I1201 06:51:23.094529 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdsll\" (UniqueName: \"kubernetes.io/projected/91c9fab4-eb3d-4b68-bf2b-31d5f17c6c0a-kube-api-access-mdsll\") pod \"cert-manager-cainjector-7f985d654d-jmgck\" (UID: \"91c9fab4-eb3d-4b68-bf2b-31d5f17c6c0a\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-jmgck" Dec 01 06:51:23 crc kubenswrapper[4632]: I1201 06:51:23.109153 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdsll\" (UniqueName: \"kubernetes.io/projected/91c9fab4-eb3d-4b68-bf2b-31d5f17c6c0a-kube-api-access-mdsll\") pod \"cert-manager-cainjector-7f985d654d-jmgck\" (UID: \"91c9fab4-eb3d-4b68-bf2b-31d5f17c6c0a\") " 
pod="cert-manager/cert-manager-cainjector-7f985d654d-jmgck" Dec 01 06:51:23 crc kubenswrapper[4632]: I1201 06:51:23.109142 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lg7mn\" (UniqueName: \"kubernetes.io/projected/15ee763c-142f-480b-92b5-6360ed211e21-kube-api-access-lg7mn\") pod \"cert-manager-5b446d88c5-dntsb\" (UID: \"15ee763c-142f-480b-92b5-6360ed211e21\") " pod="cert-manager/cert-manager-5b446d88c5-dntsb" Dec 01 06:51:23 crc kubenswrapper[4632]: I1201 06:51:23.109871 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8q99q\" (UniqueName: \"kubernetes.io/projected/05af64fb-260a-44bb-a7e8-e3b8ffbee656-kube-api-access-8q99q\") pod \"cert-manager-webhook-5655c58dd6-wjfb2\" (UID: \"05af64fb-260a-44bb-a7e8-e3b8ffbee656\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-wjfb2" Dec 01 06:51:23 crc kubenswrapper[4632]: I1201 06:51:23.283668 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-jmgck" Dec 01 06:51:23 crc kubenswrapper[4632]: I1201 06:51:23.293790 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-dntsb" Dec 01 06:51:23 crc kubenswrapper[4632]: I1201 06:51:23.296991 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-wjfb2" Dec 01 06:51:23 crc kubenswrapper[4632]: I1201 06:51:23.644055 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-dntsb"] Dec 01 06:51:23 crc kubenswrapper[4632]: I1201 06:51:23.649198 4632 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 06:51:23 crc kubenswrapper[4632]: I1201 06:51:23.676250 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-jmgck"] Dec 01 06:51:23 crc kubenswrapper[4632]: W1201 06:51:23.679440 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod91c9fab4_eb3d_4b68_bf2b_31d5f17c6c0a.slice/crio-481de61d37586a61ab38dee909a3ced33d1bdbd68706a0ac94d4aa8ca29bcedd WatchSource:0}: Error finding container 481de61d37586a61ab38dee909a3ced33d1bdbd68706a0ac94d4aa8ca29bcedd: Status 404 returned error can't find the container with id 481de61d37586a61ab38dee909a3ced33d1bdbd68706a0ac94d4aa8ca29bcedd Dec 01 06:51:23 crc kubenswrapper[4632]: I1201 06:51:23.679686 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-wjfb2"] Dec 01 06:51:23 crc kubenswrapper[4632]: I1201 06:51:23.723464 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-jmgck" event={"ID":"91c9fab4-eb3d-4b68-bf2b-31d5f17c6c0a","Type":"ContainerStarted","Data":"481de61d37586a61ab38dee909a3ced33d1bdbd68706a0ac94d4aa8ca29bcedd"} Dec 01 06:51:23 crc kubenswrapper[4632]: I1201 06:51:23.724168 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-wjfb2" event={"ID":"05af64fb-260a-44bb-a7e8-e3b8ffbee656","Type":"ContainerStarted","Data":"a4108163c1ccaeed80e1c9ca4d2a81d2a98695c92cb3e67737f010033474becd"} Dec 01 06:51:23 crc kubenswrapper[4632]: I1201 06:51:23.724817 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-dntsb" 
event={"ID":"15ee763c-142f-480b-92b5-6360ed211e21","Type":"ContainerStarted","Data":"6bab8701f738bd4194decd4abb6e05ef7041f8d0f89a0cdd9f3215e324b1384e"} Dec 01 06:51:26 crc kubenswrapper[4632]: I1201 06:51:26.737991 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-jmgck" event={"ID":"91c9fab4-eb3d-4b68-bf2b-31d5f17c6c0a","Type":"ContainerStarted","Data":"7bb5a0674df30a86f93c79ad25ef469707a5242d74097818df67bd7609b190d5"} Dec 01 06:51:26 crc kubenswrapper[4632]: I1201 06:51:26.739993 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-wjfb2" event={"ID":"05af64fb-260a-44bb-a7e8-e3b8ffbee656","Type":"ContainerStarted","Data":"1992e8e55901b1f5eebbfddeaee80de21b2fc941f39ea13d3cf362b038b18785"} Dec 01 06:51:26 crc kubenswrapper[4632]: I1201 06:51:26.740059 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-wjfb2" Dec 01 06:51:26 crc kubenswrapper[4632]: I1201 06:51:26.741236 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-dntsb" event={"ID":"15ee763c-142f-480b-92b5-6360ed211e21","Type":"ContainerStarted","Data":"81e14dd0402026b43de6a5ab470c7889cc59d8e3bd6341479e5873dbb8559af7"} Dec 01 06:51:26 crc kubenswrapper[4632]: I1201 06:51:26.749865 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-jmgck" podStartSLOduration=2.691861681 podStartE2EDuration="4.749851348s" podCreationTimestamp="2025-12-01 06:51:22 +0000 UTC" firstStartedPulling="2025-12-01 06:51:23.680592879 +0000 UTC m=+493.245605841" lastFinishedPulling="2025-12-01 06:51:25.738582535 +0000 UTC m=+495.303595508" observedRunningTime="2025-12-01 06:51:26.748113497 +0000 UTC m=+496.313126470" watchObservedRunningTime="2025-12-01 06:51:26.749851348 +0000 UTC m=+496.314864321" Dec 01 06:51:26 crc kubenswrapper[4632]: I1201 06:51:26.759527 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-wjfb2" podStartSLOduration=1.90701557 podStartE2EDuration="4.759512365s" podCreationTimestamp="2025-12-01 06:51:22 +0000 UTC" firstStartedPulling="2025-12-01 06:51:23.679046509 +0000 UTC m=+493.244059483" lastFinishedPulling="2025-12-01 06:51:26.531543314 +0000 UTC m=+496.096556278" observedRunningTime="2025-12-01 06:51:26.757675808 +0000 UTC m=+496.322688781" watchObservedRunningTime="2025-12-01 06:51:26.759512365 +0000 UTC m=+496.324525338" Dec 01 06:51:26 crc kubenswrapper[4632]: I1201 06:51:26.770692 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-dntsb" podStartSLOduration=2.678196657 podStartE2EDuration="4.770675397s" podCreationTimestamp="2025-12-01 06:51:22 +0000 UTC" firstStartedPulling="2025-12-01 06:51:23.64901479 +0000 UTC m=+493.214027763" lastFinishedPulling="2025-12-01 06:51:25.74149353 +0000 UTC m=+495.306506503" observedRunningTime="2025-12-01 06:51:26.768089315 +0000 UTC m=+496.333102289" watchObservedRunningTime="2025-12-01 06:51:26.770675397 +0000 UTC m=+496.335688370" Dec 01 06:51:33 crc kubenswrapper[4632]: I1201 06:51:33.300623 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-wjfb2" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.465968 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-ovn-kubernetes/ovnkube-node-gklnd"] Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.466487 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="ovn-controller" containerID="cri-o://9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288" gracePeriod=30 Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.466549 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="nbdb" containerID="cri-o://00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9" gracePeriod=30 Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.466621 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="sbdb" containerID="cri-o://96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a" gracePeriod=30 Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.466631 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76" gracePeriod=30 Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.466645 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="kube-rbac-proxy-node" containerID="cri-o://f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6" gracePeriod=30 Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.466684 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="northd" containerID="cri-o://fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe" gracePeriod=30 Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.466704 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="ovn-acl-logging" containerID="cri-o://d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820" gracePeriod=30 Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.492576 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="ovnkube-controller" containerID="cri-o://aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37" gracePeriod=30 Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.715984 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gklnd_ac685f74-ea0b-4a05-8018-a68fc1df20cc/ovnkube-controller/3.log" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.717881 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gklnd_ac685f74-ea0b-4a05-8018-a68fc1df20cc/ovn-acl-logging/0.log" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.718264 4632 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gklnd_ac685f74-ea0b-4a05-8018-a68fc1df20cc/ovn-controller/0.log" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.718609 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.762063 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-nf4kz"] Dec 01 06:51:34 crc kubenswrapper[4632]: E1201 06:51:34.762230 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="ovn-controller" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.762243 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="ovn-controller" Dec 01 06:51:34 crc kubenswrapper[4632]: E1201 06:51:34.762253 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="northd" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.762259 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="northd" Dec 01 06:51:34 crc kubenswrapper[4632]: E1201 06:51:34.762266 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="ovnkube-controller" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.762272 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="ovnkube-controller" Dec 01 06:51:34 crc kubenswrapper[4632]: E1201 06:51:34.762279 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="ovnkube-controller" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.762284 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="ovnkube-controller" Dec 01 06:51:34 crc kubenswrapper[4632]: E1201 06:51:34.762294 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="sbdb" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.762299 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="sbdb" Dec 01 06:51:34 crc kubenswrapper[4632]: E1201 06:51:34.762307 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="kube-rbac-proxy-node" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.762313 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="kube-rbac-proxy-node" Dec 01 06:51:34 crc kubenswrapper[4632]: E1201 06:51:34.762320 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="kube-rbac-proxy-ovn-metrics" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.762325 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="kube-rbac-proxy-ovn-metrics" Dec 01 06:51:34 crc kubenswrapper[4632]: E1201 06:51:34.762330 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="ovn-acl-logging" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.762335 4632 
state_mem.go:107] "Deleted CPUSet assignment" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="ovn-acl-logging" Dec 01 06:51:34 crc kubenswrapper[4632]: E1201 06:51:34.762342 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="nbdb" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.762347 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="nbdb" Dec 01 06:51:34 crc kubenswrapper[4632]: E1201 06:51:34.762368 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="kubecfg-setup" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.762374 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="kubecfg-setup" Dec 01 06:51:34 crc kubenswrapper[4632]: E1201 06:51:34.762381 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="ovnkube-controller" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.762386 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="ovnkube-controller" Dec 01 06:51:34 crc kubenswrapper[4632]: E1201 06:51:34.762392 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="ovnkube-controller" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.762398 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="ovnkube-controller" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.762480 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="sbdb" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.762490 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="nbdb" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.762498 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="ovnkube-controller" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.762506 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="kube-rbac-proxy-ovn-metrics" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.762512 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="ovn-acl-logging" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.762519 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="kube-rbac-proxy-node" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.762525 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="northd" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.762531 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="ovnkube-controller" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.762537 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="ovn-controller" Dec 01 06:51:34 crc 
kubenswrapper[4632]: I1201 06:51:34.762544 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="ovnkube-controller" Dec 01 06:51:34 crc kubenswrapper[4632]: E1201 06:51:34.762626 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="ovnkube-controller" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.762632 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="ovnkube-controller" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.762710 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="ovnkube-controller" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.762724 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerName="ovnkube-controller" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.763953 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.772478 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zpkn8_45a865b5-e289-4d8f-93d3-007d46f49be9/kube-multus/2.log" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.773008 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zpkn8_45a865b5-e289-4d8f-93d3-007d46f49be9/kube-multus/1.log" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.773053 4632 generic.go:334] "Generic (PLEG): container finished" podID="45a865b5-e289-4d8f-93d3-007d46f49be9" containerID="cfb39e8bfd8f275cce2681160b6aed0ce7a5dad52b4c03ecd59c1dd0bc98f507" exitCode=2 Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.773104 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zpkn8" event={"ID":"45a865b5-e289-4d8f-93d3-007d46f49be9","Type":"ContainerDied","Data":"cfb39e8bfd8f275cce2681160b6aed0ce7a5dad52b4c03ecd59c1dd0bc98f507"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.773136 4632 scope.go:117] "RemoveContainer" containerID="18efcb24d15fbe83fcbe0ada005e6c48ba2ae989572edc668a0df119176a2340" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.773542 4632 scope.go:117] "RemoveContainer" containerID="cfb39e8bfd8f275cce2681160b6aed0ce7a5dad52b4c03ecd59c1dd0bc98f507" Dec 01 06:51:34 crc kubenswrapper[4632]: E1201 06:51:34.773727 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-zpkn8_openshift-multus(45a865b5-e289-4d8f-93d3-007d46f49be9)\"" pod="openshift-multus/multus-zpkn8" podUID="45a865b5-e289-4d8f-93d3-007d46f49be9" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.776852 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gklnd_ac685f74-ea0b-4a05-8018-a68fc1df20cc/ovnkube-controller/3.log" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.779098 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gklnd_ac685f74-ea0b-4a05-8018-a68fc1df20cc/ovn-acl-logging/0.log" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.779581 4632 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-gklnd_ac685f74-ea0b-4a05-8018-a68fc1df20cc/ovn-controller/0.log" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780232 4632 generic.go:334] "Generic (PLEG): container finished" podID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerID="aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37" exitCode=0 Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780250 4632 generic.go:334] "Generic (PLEG): container finished" podID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerID="96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a" exitCode=0 Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780258 4632 generic.go:334] "Generic (PLEG): container finished" podID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerID="00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9" exitCode=0 Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780265 4632 generic.go:334] "Generic (PLEG): container finished" podID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerID="fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe" exitCode=0 Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780271 4632 generic.go:334] "Generic (PLEG): container finished" podID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerID="0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76" exitCode=0 Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780277 4632 generic.go:334] "Generic (PLEG): container finished" podID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerID="f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6" exitCode=0 Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780282 4632 generic.go:334] "Generic (PLEG): container finished" podID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerID="d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820" exitCode=143 Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780289 4632 generic.go:334] "Generic (PLEG): container finished" podID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" containerID="9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288" exitCode=143 Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780324 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" event={"ID":"ac685f74-ea0b-4a05-8018-a68fc1df20cc","Type":"ContainerDied","Data":"aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780374 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" event={"ID":"ac685f74-ea0b-4a05-8018-a68fc1df20cc","Type":"ContainerDied","Data":"96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780390 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" event={"ID":"ac685f74-ea0b-4a05-8018-a68fc1df20cc","Type":"ContainerDied","Data":"00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780401 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" event={"ID":"ac685f74-ea0b-4a05-8018-a68fc1df20cc","Type":"ContainerDied","Data":"fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780409 4632 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" event={"ID":"ac685f74-ea0b-4a05-8018-a68fc1df20cc","Type":"ContainerDied","Data":"0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780416 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" event={"ID":"ac685f74-ea0b-4a05-8018-a68fc1df20cc","Type":"ContainerDied","Data":"f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780425 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780452 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780458 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780463 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780468 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780472 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780477 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780481 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780485 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780489 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780496 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" event={"ID":"ac685f74-ea0b-4a05-8018-a68fc1df20cc","Type":"ContainerDied","Data":"d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780503 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780508 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780532 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780538 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780542 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780547 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780552 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780557 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780561 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780566 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780572 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" event={"ID":"ac685f74-ea0b-4a05-8018-a68fc1df20cc","Type":"ContainerDied","Data":"9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780579 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780584 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780589 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780611 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780617 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780621 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780626 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780632 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780638 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780642 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780649 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" event={"ID":"ac685f74-ea0b-4a05-8018-a68fc1df20cc","Type":"ContainerDied","Data":"825340cbe67a037e0f6a07f5b18f091693654210e4eb3c5b64afdc6f1103530f"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780656 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780662 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780666 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780688 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780693 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780698 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780702 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780707 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780711 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780716 4632 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209"} Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.780730 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-gklnd" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.796731 4632 scope.go:117] "RemoveContainer" containerID="aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.808236 4632 scope.go:117] "RemoveContainer" containerID="22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.815721 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-host-run-netns\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.815859 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-ovnkube-config\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.815920 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-run-ovn\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.815936 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-host-cni-netd\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.815979 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-log-socket\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.816009 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"host-slash\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-host-slash\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.816034 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-run-openvswitch\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.816064 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-ovn-node-metrics-cert\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.816086 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-env-overrides\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.816108 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-var-lib-openvswitch\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.816124 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-host-cni-bin\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.816142 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-node-log\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.816157 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92qzr\" (UniqueName: \"kubernetes.io/projected/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-kube-api-access-92qzr\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.816172 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc 
kubenswrapper[4632]: I1201 06:51:34.816201 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-host-kubelet\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.816241 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-ovnkube-script-lib\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.816259 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-host-run-ovn-kubernetes\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.816308 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-run-systemd\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.816343 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-etc-openvswitch\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.816395 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-systemd-units\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.824060 4632 scope.go:117] "RemoveContainer" containerID="96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.835004 4632 scope.go:117] "RemoveContainer" containerID="00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.844093 4632 scope.go:117] "RemoveContainer" containerID="fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.852217 4632 scope.go:117] "RemoveContainer" containerID="0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.861482 4632 scope.go:117] "RemoveContainer" containerID="f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.869540 4632 scope.go:117] "RemoveContainer" containerID="d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.880006 
4632 scope.go:117] "RemoveContainer" containerID="9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.889303 4632 scope.go:117] "RemoveContainer" containerID="0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.898579 4632 scope.go:117] "RemoveContainer" containerID="aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37" Dec 01 06:51:34 crc kubenswrapper[4632]: E1201 06:51:34.898875 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37\": container with ID starting with aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37 not found: ID does not exist" containerID="aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.898906 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37"} err="failed to get container status \"aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37\": rpc error: code = NotFound desc = could not find container \"aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37\": container with ID starting with aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.898926 4632 scope.go:117] "RemoveContainer" containerID="22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412" Dec 01 06:51:34 crc kubenswrapper[4632]: E1201 06:51:34.899184 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412\": container with ID starting with 22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412 not found: ID does not exist" containerID="22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.899218 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412"} err="failed to get container status \"22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412\": rpc error: code = NotFound desc = could not find container \"22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412\": container with ID starting with 22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.899240 4632 scope.go:117] "RemoveContainer" containerID="96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a" Dec 01 06:51:34 crc kubenswrapper[4632]: E1201 06:51:34.899577 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\": container with ID starting with 96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a not found: ID does not exist" containerID="96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.899604 4632 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a"} err="failed to get container status \"96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\": rpc error: code = NotFound desc = could not find container \"96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\": container with ID starting with 96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.899616 4632 scope.go:117] "RemoveContainer" containerID="00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9" Dec 01 06:51:34 crc kubenswrapper[4632]: E1201 06:51:34.899858 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\": container with ID starting with 00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9 not found: ID does not exist" containerID="00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.899879 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9"} err="failed to get container status \"00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\": rpc error: code = NotFound desc = could not find container \"00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\": container with ID starting with 00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.899892 4632 scope.go:117] "RemoveContainer" containerID="fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe" Dec 01 06:51:34 crc kubenswrapper[4632]: E1201 06:51:34.900072 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\": container with ID starting with fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe not found: ID does not exist" containerID="fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.900092 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe"} err="failed to get container status \"fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\": rpc error: code = NotFound desc = could not find container \"fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\": container with ID starting with fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.900104 4632 scope.go:117] "RemoveContainer" containerID="0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76" Dec 01 06:51:34 crc kubenswrapper[4632]: E1201 06:51:34.900284 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\": container with ID starting with 0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76 not found: ID does not exist" 
containerID="0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.900303 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76"} err="failed to get container status \"0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\": rpc error: code = NotFound desc = could not find container \"0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\": container with ID starting with 0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.900315 4632 scope.go:117] "RemoveContainer" containerID="f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6" Dec 01 06:51:34 crc kubenswrapper[4632]: E1201 06:51:34.900530 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\": container with ID starting with f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6 not found: ID does not exist" containerID="f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.900552 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6"} err="failed to get container status \"f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\": rpc error: code = NotFound desc = could not find container \"f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\": container with ID starting with f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.900566 4632 scope.go:117] "RemoveContainer" containerID="d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820" Dec 01 06:51:34 crc kubenswrapper[4632]: E1201 06:51:34.900754 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\": container with ID starting with d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820 not found: ID does not exist" containerID="d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.900785 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820"} err="failed to get container status \"d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\": rpc error: code = NotFound desc = could not find container \"d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\": container with ID starting with d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.900801 4632 scope.go:117] "RemoveContainer" containerID="9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288" Dec 01 06:51:34 crc kubenswrapper[4632]: E1201 06:51:34.900989 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\": container with ID starting with 9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288 not found: ID does not exist" containerID="9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.901009 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288"} err="failed to get container status \"9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\": rpc error: code = NotFound desc = could not find container \"9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\": container with ID starting with 9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.901022 4632 scope.go:117] "RemoveContainer" containerID="0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209" Dec 01 06:51:34 crc kubenswrapper[4632]: E1201 06:51:34.901216 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\": container with ID starting with 0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209 not found: ID does not exist" containerID="0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.901237 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209"} err="failed to get container status \"0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\": rpc error: code = NotFound desc = could not find container \"0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\": container with ID starting with 0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.901252 4632 scope.go:117] "RemoveContainer" containerID="aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.901510 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37"} err="failed to get container status \"aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37\": rpc error: code = NotFound desc = could not find container \"aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37\": container with ID starting with aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.901530 4632 scope.go:117] "RemoveContainer" containerID="22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.901706 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412"} err="failed to get container status \"22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412\": rpc error: code = NotFound desc = could not find container \"22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412\": container with ID starting with 
22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.901724 4632 scope.go:117] "RemoveContainer" containerID="96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.901888 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a"} err="failed to get container status \"96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\": rpc error: code = NotFound desc = could not find container \"96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\": container with ID starting with 96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.901905 4632 scope.go:117] "RemoveContainer" containerID="00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.902078 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9"} err="failed to get container status \"00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\": rpc error: code = NotFound desc = could not find container \"00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\": container with ID starting with 00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.902099 4632 scope.go:117] "RemoveContainer" containerID="fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.902294 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe"} err="failed to get container status \"fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\": rpc error: code = NotFound desc = could not find container \"fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\": container with ID starting with fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.902311 4632 scope.go:117] "RemoveContainer" containerID="0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.902575 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76"} err="failed to get container status \"0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\": rpc error: code = NotFound desc = could not find container \"0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\": container with ID starting with 0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.902599 4632 scope.go:117] "RemoveContainer" containerID="f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.902933 4632 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6"} err="failed to get container status \"f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\": rpc error: code = NotFound desc = could not find container \"f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\": container with ID starting with f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.902948 4632 scope.go:117] "RemoveContainer" containerID="d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.903133 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820"} err="failed to get container status \"d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\": rpc error: code = NotFound desc = could not find container \"d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\": container with ID starting with d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.903155 4632 scope.go:117] "RemoveContainer" containerID="9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.903382 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288"} err="failed to get container status \"9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\": rpc error: code = NotFound desc = could not find container \"9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\": container with ID starting with 9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.903402 4632 scope.go:117] "RemoveContainer" containerID="0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.903622 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209"} err="failed to get container status \"0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\": rpc error: code = NotFound desc = could not find container \"0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\": container with ID starting with 0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.903643 4632 scope.go:117] "RemoveContainer" containerID="aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.903853 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37"} err="failed to get container status \"aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37\": rpc error: code = NotFound desc = could not find container \"aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37\": container with ID starting with aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37 not found: ID does not exist" Dec 
01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.903874 4632 scope.go:117] "RemoveContainer" containerID="22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.904052 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412"} err="failed to get container status \"22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412\": rpc error: code = NotFound desc = could not find container \"22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412\": container with ID starting with 22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.904074 4632 scope.go:117] "RemoveContainer" containerID="96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.904251 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a"} err="failed to get container status \"96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\": rpc error: code = NotFound desc = could not find container \"96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\": container with ID starting with 96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.904268 4632 scope.go:117] "RemoveContainer" containerID="00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.904458 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9"} err="failed to get container status \"00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\": rpc error: code = NotFound desc = could not find container \"00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\": container with ID starting with 00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.904477 4632 scope.go:117] "RemoveContainer" containerID="fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.904654 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe"} err="failed to get container status \"fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\": rpc error: code = NotFound desc = could not find container \"fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\": container with ID starting with fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.904676 4632 scope.go:117] "RemoveContainer" containerID="0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.904865 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76"} err="failed to get container status 
\"0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\": rpc error: code = NotFound desc = could not find container \"0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\": container with ID starting with 0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.904884 4632 scope.go:117] "RemoveContainer" containerID="f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.905058 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6"} err="failed to get container status \"f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\": rpc error: code = NotFound desc = could not find container \"f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\": container with ID starting with f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.905077 4632 scope.go:117] "RemoveContainer" containerID="d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.905244 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820"} err="failed to get container status \"d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\": rpc error: code = NotFound desc = could not find container \"d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\": container with ID starting with d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.905261 4632 scope.go:117] "RemoveContainer" containerID="9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.905464 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288"} err="failed to get container status \"9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\": rpc error: code = NotFound desc = could not find container \"9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\": container with ID starting with 9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.905484 4632 scope.go:117] "RemoveContainer" containerID="0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.905659 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209"} err="failed to get container status \"0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\": rpc error: code = NotFound desc = could not find container \"0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\": container with ID starting with 0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.905678 4632 scope.go:117] "RemoveContainer" 
containerID="aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.905848 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37"} err="failed to get container status \"aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37\": rpc error: code = NotFound desc = could not find container \"aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37\": container with ID starting with aeae56f7165749777a42f9aae81d9e3a39f971e8b7827b194ee72b65c5393d37 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.905865 4632 scope.go:117] "RemoveContainer" containerID="22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.906017 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412"} err="failed to get container status \"22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412\": rpc error: code = NotFound desc = could not find container \"22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412\": container with ID starting with 22964e9a215c5763850a572df1b697f0e5738f1bac8adecd06fccfbf9b273412 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.906044 4632 scope.go:117] "RemoveContainer" containerID="96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.906197 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a"} err="failed to get container status \"96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\": rpc error: code = NotFound desc = could not find container \"96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a\": container with ID starting with 96141867258a7766cf2debf38e892dc6c7c7df3705078e57d10b60693cb8110a not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.906217 4632 scope.go:117] "RemoveContainer" containerID="00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.906383 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9"} err="failed to get container status \"00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\": rpc error: code = NotFound desc = could not find container \"00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9\": container with ID starting with 00af4690e6644f4587604b868e2a2030aabe3488ec04851726929f62c5ab6cf9 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.906399 4632 scope.go:117] "RemoveContainer" containerID="fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.906630 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe"} err="failed to get container status \"fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\": rpc error: code = NotFound desc = could not find 
container \"fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe\": container with ID starting with fa1405ac42f727bb7e82477a9fad538bdb02792836825417c225263761f3c8fe not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.906647 4632 scope.go:117] "RemoveContainer" containerID="0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.906836 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76"} err="failed to get container status \"0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\": rpc error: code = NotFound desc = could not find container \"0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76\": container with ID starting with 0c9c64f7eacca9cd38afd836f980bbdadb5e4c277ee8daa8cc861f64149a1a76 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.906857 4632 scope.go:117] "RemoveContainer" containerID="f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.907042 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6"} err="failed to get container status \"f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\": rpc error: code = NotFound desc = could not find container \"f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6\": container with ID starting with f9c9ded88f62c8a19bb22ebed97481f0674f9075b6adf847d0cf3ddda06082e6 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.907059 4632 scope.go:117] "RemoveContainer" containerID="d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.907228 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820"} err="failed to get container status \"d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\": rpc error: code = NotFound desc = could not find container \"d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820\": container with ID starting with d06d921f36e7a4cefea0ec8ba7d6e47e7fb92b0c471b3aa7deea93aefa858820 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.907249 4632 scope.go:117] "RemoveContainer" containerID="9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.907434 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288"} err="failed to get container status \"9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\": rpc error: code = NotFound desc = could not find container \"9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288\": container with ID starting with 9592c086f52c6d9f7ac9dfec04e96e8edc234c907bb5655c13df83df909b9288 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.907453 4632 scope.go:117] "RemoveContainer" containerID="0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.907644 4632 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209"} err="failed to get container status \"0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\": rpc error: code = NotFound desc = could not find container \"0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209\": container with ID starting with 0f4475d38e40fccb0a21ec3c762a6414ac9bd5b81f6359b094296dbe51cd6209 not found: ID does not exist" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917027 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-run-netns\") pod \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917054 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-node-log\") pod \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917073 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-run-openvswitch\") pod \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917087 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-kubelet\") pod \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917127 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ac685f74-ea0b-4a05-8018-a68fc1df20cc-env-overrides\") pod \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917153 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ac685f74-ea0b-4a05-8018-a68fc1df20cc-ovnkube-script-lib\") pod \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917171 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-run-ovn\") pod \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917196 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-run-ovn-kubernetes\") pod \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917212 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-cni-bin\") pod \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917228 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ac685f74-ea0b-4a05-8018-a68fc1df20cc-ovn-node-metrics-cert\") pod \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917240 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-var-lib-openvswitch\") pod \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917257 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-systemd-units\") pod \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917269 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-etc-openvswitch\") pod \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917286 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-log-socket\") pod \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917301 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-run-systemd\") pod \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917317 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ac685f74-ea0b-4a05-8018-a68fc1df20cc-ovnkube-config\") pod \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917333 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917347 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-slash\") pod \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917386 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-cni-netd\") pod \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917414 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-622pg\" (UniqueName: \"kubernetes.io/projected/ac685f74-ea0b-4a05-8018-a68fc1df20cc-kube-api-access-622pg\") pod \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\" (UID: \"ac685f74-ea0b-4a05-8018-a68fc1df20cc\") " Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917500 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-node-log" (OuterVolumeSpecName: "node-log") pod "ac685f74-ea0b-4a05-8018-a68fc1df20cc" (UID: "ac685f74-ea0b-4a05-8018-a68fc1df20cc"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917503 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "ac685f74-ea0b-4a05-8018-a68fc1df20cc" (UID: "ac685f74-ea0b-4a05-8018-a68fc1df20cc"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917534 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac685f74-ea0b-4a05-8018-a68fc1df20cc-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "ac685f74-ea0b-4a05-8018-a68fc1df20cc" (UID: "ac685f74-ea0b-4a05-8018-a68fc1df20cc"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917538 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "ac685f74-ea0b-4a05-8018-a68fc1df20cc" (UID: "ac685f74-ea0b-4a05-8018-a68fc1df20cc"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917559 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "ac685f74-ea0b-4a05-8018-a68fc1df20cc" (UID: "ac685f74-ea0b-4a05-8018-a68fc1df20cc"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917579 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "ac685f74-ea0b-4a05-8018-a68fc1df20cc" (UID: "ac685f74-ea0b-4a05-8018-a68fc1df20cc"). InnerVolumeSpecName "run-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917578 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "ac685f74-ea0b-4a05-8018-a68fc1df20cc" (UID: "ac685f74-ea0b-4a05-8018-a68fc1df20cc"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917601 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "ac685f74-ea0b-4a05-8018-a68fc1df20cc" (UID: "ac685f74-ea0b-4a05-8018-a68fc1df20cc"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917583 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "ac685f74-ea0b-4a05-8018-a68fc1df20cc" (UID: "ac685f74-ea0b-4a05-8018-a68fc1df20cc"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917636 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "ac685f74-ea0b-4a05-8018-a68fc1df20cc" (UID: "ac685f74-ea0b-4a05-8018-a68fc1df20cc"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917640 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "ac685f74-ea0b-4a05-8018-a68fc1df20cc" (UID: "ac685f74-ea0b-4a05-8018-a68fc1df20cc"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917621 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-slash" (OuterVolumeSpecName: "host-slash") pod "ac685f74-ea0b-4a05-8018-a68fc1df20cc" (UID: "ac685f74-ea0b-4a05-8018-a68fc1df20cc"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917656 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "ac685f74-ea0b-4a05-8018-a68fc1df20cc" (UID: "ac685f74-ea0b-4a05-8018-a68fc1df20cc"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917622 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-log-socket" (OuterVolumeSpecName: "log-socket") pod "ac685f74-ea0b-4a05-8018-a68fc1df20cc" (UID: "ac685f74-ea0b-4a05-8018-a68fc1df20cc"). 
InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917661 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "ac685f74-ea0b-4a05-8018-a68fc1df20cc" (UID: "ac685f74-ea0b-4a05-8018-a68fc1df20cc"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917778 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-run-systemd\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917846 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-etc-openvswitch\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917871 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-systemd-units\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917910 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-host-run-netns\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917949 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-ovnkube-config\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917981 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-run-ovn\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917999 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-host-cni-netd\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.917999 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-etc-openvswitch\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918031 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-run-systemd\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918044 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-log-socket\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918061 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-run-ovn\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918075 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-host-slash\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918084 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-host-cni-netd\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918097 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-run-openvswitch\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918106 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-log-socket\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918103 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac685f74-ea0b-4a05-8018-a68fc1df20cc-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "ac685f74-ea0b-4a05-8018-a68fc1df20cc" (UID: "ac685f74-ea0b-4a05-8018-a68fc1df20cc"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918137 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-systemd-units\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918144 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-ovn-node-metrics-cert\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918153 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-host-slash\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918169 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-env-overrides\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918176 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-run-openvswitch\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918220 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-var-lib-openvswitch\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918241 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-host-cni-bin\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918255 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac685f74-ea0b-4a05-8018-a68fc1df20cc-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "ac685f74-ea0b-4a05-8018-a68fc1df20cc" (UID: "ac685f74-ea0b-4a05-8018-a68fc1df20cc"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918264 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-node-log\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918159 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-host-run-netns\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918296 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-var-lib-openvswitch\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918281 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92qzr\" (UniqueName: \"kubernetes.io/projected/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-kube-api-access-92qzr\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918331 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-host-cni-bin\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918337 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918388 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-host-kubelet\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918426 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-ovnkube-script-lib\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918451 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-host-run-ovn-kubernetes\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918505 4632 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ac685f74-ea0b-4a05-8018-a68fc1df20cc-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918517 4632 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/ac685f74-ea0b-4a05-8018-a68fc1df20cc-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918518 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-host-kubelet\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918527 4632 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918551 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-node-log\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918555 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-host-run-ovn-kubernetes\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918561 4632 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918561 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918573 4632 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918611 4632 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918622 4632 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 
06:51:34.918630 4632 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918638 4632 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-log-socket\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918647 4632 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ac685f74-ea0b-4a05-8018-a68fc1df20cc-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918647 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-ovnkube-config\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.918941 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-env-overrides\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.919197 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-ovnkube-script-lib\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.919309 4632 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.919430 4632 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-slash\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.919524 4632 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.919602 4632 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.919752 4632 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.919937 4632 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-node-log\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:34 crc 
kubenswrapper[4632]: I1201 06:51:34.920037 4632 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.921460 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-ovn-node-metrics-cert\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.921540 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac685f74-ea0b-4a05-8018-a68fc1df20cc-kube-api-access-622pg" (OuterVolumeSpecName: "kube-api-access-622pg") pod "ac685f74-ea0b-4a05-8018-a68fc1df20cc" (UID: "ac685f74-ea0b-4a05-8018-a68fc1df20cc"). InnerVolumeSpecName "kube-api-access-622pg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.922989 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac685f74-ea0b-4a05-8018-a68fc1df20cc-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "ac685f74-ea0b-4a05-8018-a68fc1df20cc" (UID: "ac685f74-ea0b-4a05-8018-a68fc1df20cc"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.927173 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "ac685f74-ea0b-4a05-8018-a68fc1df20cc" (UID: "ac685f74-ea0b-4a05-8018-a68fc1df20cc"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:51:34 crc kubenswrapper[4632]: I1201 06:51:34.930514 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92qzr\" (UniqueName: \"kubernetes.io/projected/75a6f898-44e8-4dfd-b8f6-38e4535acfbe-kube-api-access-92qzr\") pod \"ovnkube-node-nf4kz\" (UID: \"75a6f898-44e8-4dfd-b8f6-38e4535acfbe\") " pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:35 crc kubenswrapper[4632]: I1201 06:51:35.021571 4632 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ac685f74-ea0b-4a05-8018-a68fc1df20cc-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:35 crc kubenswrapper[4632]: I1201 06:51:35.021597 4632 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/ac685f74-ea0b-4a05-8018-a68fc1df20cc-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:35 crc kubenswrapper[4632]: I1201 06:51:35.021606 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-622pg\" (UniqueName: \"kubernetes.io/projected/ac685f74-ea0b-4a05-8018-a68fc1df20cc-kube-api-access-622pg\") on node \"crc\" DevicePath \"\"" Dec 01 06:51:35 crc kubenswrapper[4632]: I1201 06:51:35.074503 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:35 crc kubenswrapper[4632]: I1201 06:51:35.101391 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-gklnd"] Dec 01 06:51:35 crc kubenswrapper[4632]: I1201 06:51:35.104544 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-gklnd"] Dec 01 06:51:35 crc kubenswrapper[4632]: I1201 06:51:35.786007 4632 generic.go:334] "Generic (PLEG): container finished" podID="75a6f898-44e8-4dfd-b8f6-38e4535acfbe" containerID="e3b0cfe651703a9c6bde9f4a0770081d45235d611a49b8b0622fabe350650a1e" exitCode=0 Dec 01 06:51:35 crc kubenswrapper[4632]: I1201 06:51:35.786084 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" event={"ID":"75a6f898-44e8-4dfd-b8f6-38e4535acfbe","Type":"ContainerDied","Data":"e3b0cfe651703a9c6bde9f4a0770081d45235d611a49b8b0622fabe350650a1e"} Dec 01 06:51:35 crc kubenswrapper[4632]: I1201 06:51:35.786290 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" event={"ID":"75a6f898-44e8-4dfd-b8f6-38e4535acfbe","Type":"ContainerStarted","Data":"ef433f0145a4250b07696695e753c6002adb2211d8fe3f88aec9b0ccdc0f47ff"} Dec 01 06:51:35 crc kubenswrapper[4632]: I1201 06:51:35.789195 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zpkn8_45a865b5-e289-4d8f-93d3-007d46f49be9/kube-multus/2.log" Dec 01 06:51:36 crc kubenswrapper[4632]: I1201 06:51:36.756096 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac685f74-ea0b-4a05-8018-a68fc1df20cc" path="/var/lib/kubelet/pods/ac685f74-ea0b-4a05-8018-a68fc1df20cc/volumes" Dec 01 06:51:36 crc kubenswrapper[4632]: I1201 06:51:36.795485 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" event={"ID":"75a6f898-44e8-4dfd-b8f6-38e4535acfbe","Type":"ContainerStarted","Data":"61209d72ed145e73c7cd9faaf4060ff44262248868ca9db1ebce79fe31f2a4a6"} Dec 01 06:51:36 crc kubenswrapper[4632]: I1201 06:51:36.795522 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" event={"ID":"75a6f898-44e8-4dfd-b8f6-38e4535acfbe","Type":"ContainerStarted","Data":"894b1661d11cbd918ebeea747af6bafc649527ca49ece36ff98dc570a3274c22"} Dec 01 06:51:36 crc kubenswrapper[4632]: I1201 06:51:36.795533 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" event={"ID":"75a6f898-44e8-4dfd-b8f6-38e4535acfbe","Type":"ContainerStarted","Data":"97acd8cd1c45c07a68f7de2a733eb486f9e9276de693f6f2e5ad8c8bf8045a37"} Dec 01 06:51:36 crc kubenswrapper[4632]: I1201 06:51:36.795541 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" event={"ID":"75a6f898-44e8-4dfd-b8f6-38e4535acfbe","Type":"ContainerStarted","Data":"1b19467d2b5dfba1e69cc81990ab9d21bf3f5591600fbd3386b4d71d3bf4a51b"} Dec 01 06:51:36 crc kubenswrapper[4632]: I1201 06:51:36.795548 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" event={"ID":"75a6f898-44e8-4dfd-b8f6-38e4535acfbe","Type":"ContainerStarted","Data":"40bfb10cc1cd88a033200005cdcb206f83860d84e99176eadd1c6879726c033f"} Dec 01 06:51:36 crc kubenswrapper[4632]: I1201 06:51:36.795556 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" 
event={"ID":"75a6f898-44e8-4dfd-b8f6-38e4535acfbe","Type":"ContainerStarted","Data":"9cb4de3d60953870be9da67a8b9b7c3d906d065b11d306567cd3a1b50f490c82"} Dec 01 06:51:38 crc kubenswrapper[4632]: I1201 06:51:38.807433 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" event={"ID":"75a6f898-44e8-4dfd-b8f6-38e4535acfbe","Type":"ContainerStarted","Data":"da366553de9b9312e04b2422b94723a190ca84fea7431726a9bbac4160859f12"} Dec 01 06:51:40 crc kubenswrapper[4632]: I1201 06:51:40.819441 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" event={"ID":"75a6f898-44e8-4dfd-b8f6-38e4535acfbe","Type":"ContainerStarted","Data":"e98f0933a3ee8abf950fd08ac41c4b215bcc26c015a4952963501061bbb27203"} Dec 01 06:51:40 crc kubenswrapper[4632]: I1201 06:51:40.819792 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:40 crc kubenswrapper[4632]: I1201 06:51:40.819841 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:40 crc kubenswrapper[4632]: I1201 06:51:40.819851 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:40 crc kubenswrapper[4632]: I1201 06:51:40.838694 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:40 crc kubenswrapper[4632]: I1201 06:51:40.838869 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" Dec 01 06:51:40 crc kubenswrapper[4632]: I1201 06:51:40.840585 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz" podStartSLOduration=6.840574714 podStartE2EDuration="6.840574714s" podCreationTimestamp="2025-12-01 06:51:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:51:40.840128381 +0000 UTC m=+510.405141364" watchObservedRunningTime="2025-12-01 06:51:40.840574714 +0000 UTC m=+510.405587686" Dec 01 06:51:46 crc kubenswrapper[4632]: I1201 06:51:46.750401 4632 scope.go:117] "RemoveContainer" containerID="cfb39e8bfd8f275cce2681160b6aed0ce7a5dad52b4c03ecd59c1dd0bc98f507" Dec 01 06:51:46 crc kubenswrapper[4632]: E1201 06:51:46.750959 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-zpkn8_openshift-multus(45a865b5-e289-4d8f-93d3-007d46f49be9)\"" pod="openshift-multus/multus-zpkn8" podUID="45a865b5-e289-4d8f-93d3-007d46f49be9" Dec 01 06:51:49 crc kubenswrapper[4632]: I1201 06:51:49.498520 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 06:51:49 crc kubenswrapper[4632]: I1201 06:51:49.498797 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 06:51:58 crc kubenswrapper[4632]: I1201 06:51:58.750125 4632 scope.go:117] "RemoveContainer" containerID="cfb39e8bfd8f275cce2681160b6aed0ce7a5dad52b4c03ecd59c1dd0bc98f507" Dec 01 06:51:58 crc kubenswrapper[4632]: I1201 06:51:58.882982 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-zpkn8_45a865b5-e289-4d8f-93d3-007d46f49be9/kube-multus/2.log" Dec 01 06:51:58 crc kubenswrapper[4632]: I1201 06:51:58.883032 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-zpkn8" event={"ID":"45a865b5-e289-4d8f-93d3-007d46f49be9","Type":"ContainerStarted","Data":"82376f42dd2a6bd552468219a8ccb3e3e8c371c40864e3e81d6968471794452d"} Dec 01 06:52:00 crc kubenswrapper[4632]: I1201 06:52:00.372083 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx"] Dec 01 06:52:00 crc kubenswrapper[4632]: I1201 06:52:00.373734 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx" Dec 01 06:52:00 crc kubenswrapper[4632]: I1201 06:52:00.375369 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a2c9f938-68bc-4efd-8547-45136745c6c6-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx\" (UID: \"a2c9f938-68bc-4efd-8547-45136745c6c6\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx" Dec 01 06:52:00 crc kubenswrapper[4632]: I1201 06:52:00.375502 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a2c9f938-68bc-4efd-8547-45136745c6c6-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx\" (UID: \"a2c9f938-68bc-4efd-8547-45136745c6c6\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx" Dec 01 06:52:00 crc kubenswrapper[4632]: I1201 06:52:00.375570 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kj6xm\" (UniqueName: \"kubernetes.io/projected/a2c9f938-68bc-4efd-8547-45136745c6c6-kube-api-access-kj6xm\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx\" (UID: \"a2c9f938-68bc-4efd-8547-45136745c6c6\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx" Dec 01 06:52:00 crc kubenswrapper[4632]: I1201 06:52:00.375507 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 01 06:52:00 crc kubenswrapper[4632]: I1201 06:52:00.380414 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx"] Dec 01 06:52:00 crc kubenswrapper[4632]: I1201 06:52:00.476261 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a2c9f938-68bc-4efd-8547-45136745c6c6-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx\" (UID: \"a2c9f938-68bc-4efd-8547-45136745c6c6\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx" Dec 01 06:52:00 crc 
kubenswrapper[4632]: I1201 06:52:00.476334 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a2c9f938-68bc-4efd-8547-45136745c6c6-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx\" (UID: \"a2c9f938-68bc-4efd-8547-45136745c6c6\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx" Dec 01 06:52:00 crc kubenswrapper[4632]: I1201 06:52:00.476388 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kj6xm\" (UniqueName: \"kubernetes.io/projected/a2c9f938-68bc-4efd-8547-45136745c6c6-kube-api-access-kj6xm\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx\" (UID: \"a2c9f938-68bc-4efd-8547-45136745c6c6\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx" Dec 01 06:52:00 crc kubenswrapper[4632]: I1201 06:52:00.476645 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a2c9f938-68bc-4efd-8547-45136745c6c6-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx\" (UID: \"a2c9f938-68bc-4efd-8547-45136745c6c6\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx" Dec 01 06:52:00 crc kubenswrapper[4632]: I1201 06:52:00.476696 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a2c9f938-68bc-4efd-8547-45136745c6c6-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx\" (UID: \"a2c9f938-68bc-4efd-8547-45136745c6c6\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx" Dec 01 06:52:00 crc kubenswrapper[4632]: I1201 06:52:00.490706 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kj6xm\" (UniqueName: \"kubernetes.io/projected/a2c9f938-68bc-4efd-8547-45136745c6c6-kube-api-access-kj6xm\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx\" (UID: \"a2c9f938-68bc-4efd-8547-45136745c6c6\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx" Dec 01 06:52:00 crc kubenswrapper[4632]: I1201 06:52:00.684996 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx"
Dec 01 06:52:01 crc kubenswrapper[4632]: I1201 06:52:01.019380 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx"]
Dec 01 06:52:01 crc kubenswrapper[4632]: W1201 06:52:01.022199 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda2c9f938_68bc_4efd_8547_45136745c6c6.slice/crio-95b3fb072efc39ebb65c03f677ae3ebe412c534977f14b82811e1e7c1940b51b WatchSource:0}: Error finding container 95b3fb072efc39ebb65c03f677ae3ebe412c534977f14b82811e1e7c1940b51b: Status 404 returned error can't find the container with id 95b3fb072efc39ebb65c03f677ae3ebe412c534977f14b82811e1e7c1940b51b
Dec 01 06:52:01 crc kubenswrapper[4632]: I1201 06:52:01.896273 4632 generic.go:334] "Generic (PLEG): container finished" podID="a2c9f938-68bc-4efd-8547-45136745c6c6" containerID="6fcf5f2bd5cd4f3c8f7a68a78e0602a556e1662d28e8a9c0a9c54ebc46c20c74" exitCode=0
Dec 01 06:52:01 crc kubenswrapper[4632]: I1201 06:52:01.896375 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx" event={"ID":"a2c9f938-68bc-4efd-8547-45136745c6c6","Type":"ContainerDied","Data":"6fcf5f2bd5cd4f3c8f7a68a78e0602a556e1662d28e8a9c0a9c54ebc46c20c74"}
Dec 01 06:52:01 crc kubenswrapper[4632]: I1201 06:52:01.896496 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx" event={"ID":"a2c9f938-68bc-4efd-8547-45136745c6c6","Type":"ContainerStarted","Data":"95b3fb072efc39ebb65c03f677ae3ebe412c534977f14b82811e1e7c1940b51b"}
Dec 01 06:52:03 crc kubenswrapper[4632]: I1201 06:52:03.906789 4632 generic.go:334] "Generic (PLEG): container finished" podID="a2c9f938-68bc-4efd-8547-45136745c6c6" containerID="1c397642f77b0be8849a4d54651feae8c9649ad1f46bb03695e078dcb7a5cdd8" exitCode=0
Dec 01 06:52:03 crc kubenswrapper[4632]: I1201 06:52:03.906888 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx" event={"ID":"a2c9f938-68bc-4efd-8547-45136745c6c6","Type":"ContainerDied","Data":"1c397642f77b0be8849a4d54651feae8c9649ad1f46bb03695e078dcb7a5cdd8"}
Dec 01 06:52:04 crc kubenswrapper[4632]: I1201 06:52:04.912335 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx" event={"ID":"a2c9f938-68bc-4efd-8547-45136745c6c6","Type":"ContainerStarted","Data":"a18b0d61997a1941b3bcde5b15777217212b91e28028a6d5886e7405dd77821f"}
Dec 01 06:52:04 crc kubenswrapper[4632]: I1201 06:52:04.924644 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx" podStartSLOduration=3.753111697 podStartE2EDuration="4.92462648s" podCreationTimestamp="2025-12-01 06:52:00 +0000 UTC" firstStartedPulling="2025-12-01 06:52:01.897461777 +0000 UTC m=+531.462474750" lastFinishedPulling="2025-12-01 06:52:03.06897656 +0000 UTC m=+532.633989533" observedRunningTime="2025-12-01 06:52:04.923172364 +0000 UTC m=+534.488185339" watchObservedRunningTime="2025-12-01 06:52:04.92462648 +0000 UTC m=+534.489639452"
Dec 01 06:52:05 crc kubenswrapper[4632]: I1201 06:52:05.088897 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-nf4kz"
Dec 01 06:52:05 crc kubenswrapper[4632]: I1201 06:52:05.917678 4632 generic.go:334] "Generic (PLEG): container finished" podID="a2c9f938-68bc-4efd-8547-45136745c6c6" containerID="a18b0d61997a1941b3bcde5b15777217212b91e28028a6d5886e7405dd77821f" exitCode=0
Dec 01 06:52:05 crc kubenswrapper[4632]: I1201 06:52:05.917736 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx" event={"ID":"a2c9f938-68bc-4efd-8547-45136745c6c6","Type":"ContainerDied","Data":"a18b0d61997a1941b3bcde5b15777217212b91e28028a6d5886e7405dd77821f"}
Dec 01 06:52:07 crc kubenswrapper[4632]: I1201 06:52:07.112138 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx"
Dec 01 06:52:07 crc kubenswrapper[4632]: I1201 06:52:07.241086 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a2c9f938-68bc-4efd-8547-45136745c6c6-bundle\") pod \"a2c9f938-68bc-4efd-8547-45136745c6c6\" (UID: \"a2c9f938-68bc-4efd-8547-45136745c6c6\") "
Dec 01 06:52:07 crc kubenswrapper[4632]: I1201 06:52:07.241146 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a2c9f938-68bc-4efd-8547-45136745c6c6-util\") pod \"a2c9f938-68bc-4efd-8547-45136745c6c6\" (UID: \"a2c9f938-68bc-4efd-8547-45136745c6c6\") "
Dec 01 06:52:07 crc kubenswrapper[4632]: I1201 06:52:07.241167 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kj6xm\" (UniqueName: \"kubernetes.io/projected/a2c9f938-68bc-4efd-8547-45136745c6c6-kube-api-access-kj6xm\") pod \"a2c9f938-68bc-4efd-8547-45136745c6c6\" (UID: \"a2c9f938-68bc-4efd-8547-45136745c6c6\") "
Dec 01 06:52:07 crc kubenswrapper[4632]: I1201 06:52:07.241557 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2c9f938-68bc-4efd-8547-45136745c6c6-bundle" (OuterVolumeSpecName: "bundle") pod "a2c9f938-68bc-4efd-8547-45136745c6c6" (UID: "a2c9f938-68bc-4efd-8547-45136745c6c6"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 06:52:07 crc kubenswrapper[4632]: I1201 06:52:07.245074 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2c9f938-68bc-4efd-8547-45136745c6c6-kube-api-access-kj6xm" (OuterVolumeSpecName: "kube-api-access-kj6xm") pod "a2c9f938-68bc-4efd-8547-45136745c6c6" (UID: "a2c9f938-68bc-4efd-8547-45136745c6c6"). InnerVolumeSpecName "kube-api-access-kj6xm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:52:07 crc kubenswrapper[4632]: I1201 06:52:07.248363 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2c9f938-68bc-4efd-8547-45136745c6c6-util" (OuterVolumeSpecName: "util") pod "a2c9f938-68bc-4efd-8547-45136745c6c6" (UID: "a2c9f938-68bc-4efd-8547-45136745c6c6"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 06:52:07 crc kubenswrapper[4632]: I1201 06:52:07.342083 4632 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a2c9f938-68bc-4efd-8547-45136745c6c6-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 06:52:07 crc kubenswrapper[4632]: I1201 06:52:07.342108 4632 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a2c9f938-68bc-4efd-8547-45136745c6c6-util\") on node \"crc\" DevicePath \"\""
Dec 01 06:52:07 crc kubenswrapper[4632]: I1201 06:52:07.342117 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kj6xm\" (UniqueName: \"kubernetes.io/projected/a2c9f938-68bc-4efd-8547-45136745c6c6-kube-api-access-kj6xm\") on node \"crc\" DevicePath \"\""
Dec 01 06:52:07 crc kubenswrapper[4632]: I1201 06:52:07.934174 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx" event={"ID":"a2c9f938-68bc-4efd-8547-45136745c6c6","Type":"ContainerDied","Data":"95b3fb072efc39ebb65c03f677ae3ebe412c534977f14b82811e1e7c1940b51b"}
Dec 01 06:52:07 crc kubenswrapper[4632]: I1201 06:52:07.934209 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95b3fb072efc39ebb65c03f677ae3ebe412c534977f14b82811e1e7c1940b51b"
Dec 01 06:52:07 crc kubenswrapper[4632]: I1201 06:52:07.934215 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx"
Dec 01 06:52:11 crc kubenswrapper[4632]: I1201 06:52:11.834746 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-w7h9m"]
Dec 01 06:52:11 crc kubenswrapper[4632]: E1201 06:52:11.835221 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2c9f938-68bc-4efd-8547-45136745c6c6" containerName="util"
Dec 01 06:52:11 crc kubenswrapper[4632]: I1201 06:52:11.835233 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2c9f938-68bc-4efd-8547-45136745c6c6" containerName="util"
Dec 01 06:52:11 crc kubenswrapper[4632]: E1201 06:52:11.835241 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2c9f938-68bc-4efd-8547-45136745c6c6" containerName="pull"
Dec 01 06:52:11 crc kubenswrapper[4632]: I1201 06:52:11.835246 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2c9f938-68bc-4efd-8547-45136745c6c6" containerName="pull"
Dec 01 06:52:11 crc kubenswrapper[4632]: E1201 06:52:11.835256 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2c9f938-68bc-4efd-8547-45136745c6c6" containerName="extract"
Dec 01 06:52:11 crc kubenswrapper[4632]: I1201 06:52:11.835261 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2c9f938-68bc-4efd-8547-45136745c6c6" containerName="extract"
Dec 01 06:52:11 crc kubenswrapper[4632]: I1201 06:52:11.835380 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2c9f938-68bc-4efd-8547-45136745c6c6" containerName="extract"
Dec 01 06:52:11 crc kubenswrapper[4632]: I1201 06:52:11.835715 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-w7h9m"
Dec 01 06:52:11 crc kubenswrapper[4632]: I1201 06:52:11.838762 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-j5qzt"
Dec 01 06:52:11 crc kubenswrapper[4632]: I1201 06:52:11.839048 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt"
Dec 01 06:52:11 crc kubenswrapper[4632]: I1201 06:52:11.845988 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-w7h9m"]
Dec 01 06:52:11 crc kubenswrapper[4632]: I1201 06:52:11.846868 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt"
Dec 01 06:52:11 crc kubenswrapper[4632]: I1201 06:52:11.883807 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8bsr\" (UniqueName: \"kubernetes.io/projected/7fcf49f3-19eb-41a7-b095-ceea8b76f9bd-kube-api-access-j8bsr\") pod \"nmstate-operator-5b5b58f5c8-w7h9m\" (UID: \"7fcf49f3-19eb-41a7-b095-ceea8b76f9bd\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-w7h9m"
Dec 01 06:52:11 crc kubenswrapper[4632]: I1201 06:52:11.985285 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8bsr\" (UniqueName: \"kubernetes.io/projected/7fcf49f3-19eb-41a7-b095-ceea8b76f9bd-kube-api-access-j8bsr\") pod \"nmstate-operator-5b5b58f5c8-w7h9m\" (UID: \"7fcf49f3-19eb-41a7-b095-ceea8b76f9bd\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-w7h9m"
Dec 01 06:52:12 crc kubenswrapper[4632]: I1201 06:52:12.000763 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8bsr\" (UniqueName: \"kubernetes.io/projected/7fcf49f3-19eb-41a7-b095-ceea8b76f9bd-kube-api-access-j8bsr\") pod \"nmstate-operator-5b5b58f5c8-w7h9m\" (UID: \"7fcf49f3-19eb-41a7-b095-ceea8b76f9bd\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-w7h9m"
Dec 01 06:52:12 crc kubenswrapper[4632]: I1201 06:52:12.148219 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-w7h9m"
Dec 01 06:52:12 crc kubenswrapper[4632]: I1201 06:52:12.286600 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-w7h9m"]
Dec 01 06:52:12 crc kubenswrapper[4632]: W1201 06:52:12.292754 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7fcf49f3_19eb_41a7_b095_ceea8b76f9bd.slice/crio-4b7240878cf7d1293e7fd6c6a83b00cfd6640e48d9c6e103959b44073206e555 WatchSource:0}: Error finding container 4b7240878cf7d1293e7fd6c6a83b00cfd6640e48d9c6e103959b44073206e555: Status 404 returned error can't find the container with id 4b7240878cf7d1293e7fd6c6a83b00cfd6640e48d9c6e103959b44073206e555
Dec 01 06:52:12 crc kubenswrapper[4632]: I1201 06:52:12.953112 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-w7h9m" event={"ID":"7fcf49f3-19eb-41a7-b095-ceea8b76f9bd","Type":"ContainerStarted","Data":"4b7240878cf7d1293e7fd6c6a83b00cfd6640e48d9c6e103959b44073206e555"}
Dec 01 06:52:14 crc kubenswrapper[4632]: I1201 06:52:14.962144 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-w7h9m" event={"ID":"7fcf49f3-19eb-41a7-b095-ceea8b76f9bd","Type":"ContainerStarted","Data":"053542cd997a5c5bb628d5a6b768c45f8e172e50ddf415a0f949b2e905ba21b3"}
Dec 01 06:52:14 crc kubenswrapper[4632]: I1201 06:52:14.997648 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-w7h9m" podStartSLOduration=2.15316667 podStartE2EDuration="3.997630349s" podCreationTimestamp="2025-12-01 06:52:11 +0000 UTC" firstStartedPulling="2025-12-01 06:52:12.295168213 +0000 UTC m=+541.860181187" lastFinishedPulling="2025-12-01 06:52:14.139631893 +0000 UTC m=+543.704644866" observedRunningTime="2025-12-01 06:52:14.996045599 +0000 UTC m=+544.561058571" watchObservedRunningTime="2025-12-01 06:52:14.997630349 +0000 UTC m=+544.562643321"
Dec 01 06:52:19 crc kubenswrapper[4632]: I1201 06:52:19.498445 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 06:52:19 crc kubenswrapper[4632]: I1201 06:52:19.498786 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.600369 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-ngl6v"]
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.601094 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ngl6v"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.602436 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-sfr2c"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.609783 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hmmp7"]
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.610496 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hmmp7"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.613321 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.613700 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-ngl6v"]
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.624471 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-qjq94"]
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.625054 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-qjq94"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.630484 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hmmp7"]
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.686170 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2sl2\" (UniqueName: \"kubernetes.io/projected/3b03b7ed-47ab-4ae0-95e7-ed1c830fe065-kube-api-access-g2sl2\") pod \"nmstate-metrics-7f946cbc9-ngl6v\" (UID: \"3b03b7ed-47ab-4ae0-95e7-ed1c830fe065\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ngl6v"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.686233 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/f517f423-af69-4a25-a169-e71268fa0ca3-nmstate-lock\") pod \"nmstate-handler-qjq94\" (UID: \"f517f423-af69-4a25-a169-e71268fa0ca3\") " pod="openshift-nmstate/nmstate-handler-qjq94"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.686254 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/f517f423-af69-4a25-a169-e71268fa0ca3-dbus-socket\") pod \"nmstate-handler-qjq94\" (UID: \"f517f423-af69-4a25-a169-e71268fa0ca3\") " pod="openshift-nmstate/nmstate-handler-qjq94"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.686395 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/f517f423-af69-4a25-a169-e71268fa0ca3-ovs-socket\") pod \"nmstate-handler-qjq94\" (UID: \"f517f423-af69-4a25-a169-e71268fa0ca3\") " pod="openshift-nmstate/nmstate-handler-qjq94"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.686556 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4c4wm\" (UniqueName: \"kubernetes.io/projected/f517f423-af69-4a25-a169-e71268fa0ca3-kube-api-access-4c4wm\") pod \"nmstate-handler-qjq94\" (UID: \"f517f423-af69-4a25-a169-e71268fa0ca3\") " pod="openshift-nmstate/nmstate-handler-qjq94"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.686685 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/70be3201-d4ba-4c07-950e-527ad7d2024d-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-hmmp7\" (UID: \"70be3201-d4ba-4c07-950e-527ad7d2024d\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hmmp7"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.686718 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmqf5\" (UniqueName: \"kubernetes.io/projected/70be3201-d4ba-4c07-950e-527ad7d2024d-kube-api-access-dmqf5\") pod \"nmstate-webhook-5f6d4c5ccb-hmmp7\" (UID: \"70be3201-d4ba-4c07-950e-527ad7d2024d\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hmmp7"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.704504 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-m9dcd"]
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.705216 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-m9dcd"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.706658 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.706884 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.707094 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-mv8g4"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.717130 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-m9dcd"]
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.788023 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4c4wm\" (UniqueName: \"kubernetes.io/projected/f517f423-af69-4a25-a169-e71268fa0ca3-kube-api-access-4c4wm\") pod \"nmstate-handler-qjq94\" (UID: \"f517f423-af69-4a25-a169-e71268fa0ca3\") " pod="openshift-nmstate/nmstate-handler-qjq94"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.788092 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/6d66d298-9ddf-440c-ace2-14c38dc309b0-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-m9dcd\" (UID: \"6d66d298-9ddf-440c-ace2-14c38dc309b0\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-m9dcd"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.788139 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/70be3201-d4ba-4c07-950e-527ad7d2024d-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-hmmp7\" (UID: \"70be3201-d4ba-4c07-950e-527ad7d2024d\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hmmp7"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.788157 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/6d66d298-9ddf-440c-ace2-14c38dc309b0-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-m9dcd\" (UID: \"6d66d298-9ddf-440c-ace2-14c38dc309b0\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-m9dcd"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.788178 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmqf5\" (UniqueName: \"kubernetes.io/projected/70be3201-d4ba-4c07-950e-527ad7d2024d-kube-api-access-dmqf5\") pod \"nmstate-webhook-5f6d4c5ccb-hmmp7\" (UID: \"70be3201-d4ba-4c07-950e-527ad7d2024d\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hmmp7"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.788194 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2sl2\" (UniqueName: \"kubernetes.io/projected/3b03b7ed-47ab-4ae0-95e7-ed1c830fe065-kube-api-access-g2sl2\") pod \"nmstate-metrics-7f946cbc9-ngl6v\" (UID: \"3b03b7ed-47ab-4ae0-95e7-ed1c830fe065\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ngl6v"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.788218 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/f517f423-af69-4a25-a169-e71268fa0ca3-nmstate-lock\") pod \"nmstate-handler-qjq94\" (UID: \"f517f423-af69-4a25-a169-e71268fa0ca3\") " pod="openshift-nmstate/nmstate-handler-qjq94"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.788232 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/f517f423-af69-4a25-a169-e71268fa0ca3-dbus-socket\") pod \"nmstate-handler-qjq94\" (UID: \"f517f423-af69-4a25-a169-e71268fa0ca3\") " pod="openshift-nmstate/nmstate-handler-qjq94"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.788255 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/f517f423-af69-4a25-a169-e71268fa0ca3-ovs-socket\") pod \"nmstate-handler-qjq94\" (UID: \"f517f423-af69-4a25-a169-e71268fa0ca3\") " pod="openshift-nmstate/nmstate-handler-qjq94"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.788288 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5hht\" (UniqueName: \"kubernetes.io/projected/6d66d298-9ddf-440c-ace2-14c38dc309b0-kube-api-access-x5hht\") pod \"nmstate-console-plugin-7fbb5f6569-m9dcd\" (UID: \"6d66d298-9ddf-440c-ace2-14c38dc309b0\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-m9dcd"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.788320 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/f517f423-af69-4a25-a169-e71268fa0ca3-nmstate-lock\") pod \"nmstate-handler-qjq94\" (UID: \"f517f423-af69-4a25-a169-e71268fa0ca3\") " pod="openshift-nmstate/nmstate-handler-qjq94"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.788347 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/f517f423-af69-4a25-a169-e71268fa0ca3-ovs-socket\") pod \"nmstate-handler-qjq94\" (UID: \"f517f423-af69-4a25-a169-e71268fa0ca3\") " pod="openshift-nmstate/nmstate-handler-qjq94"
Dec 01 06:52:20 crc kubenswrapper[4632]: E1201 06:52:20.788328 4632 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found
Dec 01 06:52:20 crc kubenswrapper[4632]: E1201 06:52:20.788450 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/70be3201-d4ba-4c07-950e-527ad7d2024d-tls-key-pair podName:70be3201-d4ba-4c07-950e-527ad7d2024d nodeName:}" failed. No retries permitted until 2025-12-01 06:52:21.288431866 +0000 UTC m=+550.853444840 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/70be3201-d4ba-4c07-950e-527ad7d2024d-tls-key-pair") pod "nmstate-webhook-5f6d4c5ccb-hmmp7" (UID: "70be3201-d4ba-4c07-950e-527ad7d2024d") : secret "openshift-nmstate-webhook" not found
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.788706 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/f517f423-af69-4a25-a169-e71268fa0ca3-dbus-socket\") pod \"nmstate-handler-qjq94\" (UID: \"f517f423-af69-4a25-a169-e71268fa0ca3\") " pod="openshift-nmstate/nmstate-handler-qjq94"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.806012 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4c4wm\" (UniqueName: \"kubernetes.io/projected/f517f423-af69-4a25-a169-e71268fa0ca3-kube-api-access-4c4wm\") pod \"nmstate-handler-qjq94\" (UID: \"f517f423-af69-4a25-a169-e71268fa0ca3\") " pod="openshift-nmstate/nmstate-handler-qjq94"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.806079 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmqf5\" (UniqueName: \"kubernetes.io/projected/70be3201-d4ba-4c07-950e-527ad7d2024d-kube-api-access-dmqf5\") pod \"nmstate-webhook-5f6d4c5ccb-hmmp7\" (UID: \"70be3201-d4ba-4c07-950e-527ad7d2024d\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hmmp7"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.811577 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2sl2\" (UniqueName: \"kubernetes.io/projected/3b03b7ed-47ab-4ae0-95e7-ed1c830fe065-kube-api-access-g2sl2\") pod \"nmstate-metrics-7f946cbc9-ngl6v\" (UID: \"3b03b7ed-47ab-4ae0-95e7-ed1c830fe065\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ngl6v"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.870944 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-5554869996-s6j48"]
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.871607 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-5554869996-s6j48"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.881988 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5554869996-s6j48"]
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.889778 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/6d66d298-9ddf-440c-ace2-14c38dc309b0-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-m9dcd\" (UID: \"6d66d298-9ddf-440c-ace2-14c38dc309b0\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-m9dcd"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.889836 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbx2t\" (UniqueName: \"kubernetes.io/projected/750ceaf2-6038-448f-8141-1a2014d81e34-kube-api-access-pbx2t\") pod \"console-5554869996-s6j48\" (UID: \"750ceaf2-6038-448f-8141-1a2014d81e34\") " pod="openshift-console/console-5554869996-s6j48"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.889990 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/6d66d298-9ddf-440c-ace2-14c38dc309b0-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-m9dcd\" (UID: \"6d66d298-9ddf-440c-ace2-14c38dc309b0\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-m9dcd"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.890062 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/750ceaf2-6038-448f-8141-1a2014d81e34-console-config\") pod \"console-5554869996-s6j48\" (UID: \"750ceaf2-6038-448f-8141-1a2014d81e34\") " pod="openshift-console/console-5554869996-s6j48"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.890087 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/750ceaf2-6038-448f-8141-1a2014d81e34-trusted-ca-bundle\") pod \"console-5554869996-s6j48\" (UID: \"750ceaf2-6038-448f-8141-1a2014d81e34\") " pod="openshift-console/console-5554869996-s6j48"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.890119 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/750ceaf2-6038-448f-8141-1a2014d81e34-service-ca\") pod \"console-5554869996-s6j48\" (UID: \"750ceaf2-6038-448f-8141-1a2014d81e34\") " pod="openshift-console/console-5554869996-s6j48"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.890139 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5hht\" (UniqueName: \"kubernetes.io/projected/6d66d298-9ddf-440c-ace2-14c38dc309b0-kube-api-access-x5hht\") pod \"nmstate-console-plugin-7fbb5f6569-m9dcd\" (UID: \"6d66d298-9ddf-440c-ace2-14c38dc309b0\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-m9dcd"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.890217 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/750ceaf2-6038-448f-8141-1a2014d81e34-console-oauth-config\") pod \"console-5554869996-s6j48\" (UID: \"750ceaf2-6038-448f-8141-1a2014d81e34\") " pod="openshift-console/console-5554869996-s6j48"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.890301 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/750ceaf2-6038-448f-8141-1a2014d81e34-console-serving-cert\") pod \"console-5554869996-s6j48\" (UID: \"750ceaf2-6038-448f-8141-1a2014d81e34\") " pod="openshift-console/console-5554869996-s6j48"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.890399 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/750ceaf2-6038-448f-8141-1a2014d81e34-oauth-serving-cert\") pod \"console-5554869996-s6j48\" (UID: \"750ceaf2-6038-448f-8141-1a2014d81e34\") " pod="openshift-console/console-5554869996-s6j48"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.890722 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/6d66d298-9ddf-440c-ace2-14c38dc309b0-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-m9dcd\" (UID: \"6d66d298-9ddf-440c-ace2-14c38dc309b0\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-m9dcd"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.892635 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/6d66d298-9ddf-440c-ace2-14c38dc309b0-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-m9dcd\" (UID: \"6d66d298-9ddf-440c-ace2-14c38dc309b0\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-m9dcd"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.904831 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5hht\" (UniqueName: \"kubernetes.io/projected/6d66d298-9ddf-440c-ace2-14c38dc309b0-kube-api-access-x5hht\") pod \"nmstate-console-plugin-7fbb5f6569-m9dcd\" (UID: \"6d66d298-9ddf-440c-ace2-14c38dc309b0\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-m9dcd"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.915330 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ngl6v"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.933831 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-qjq94"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.991006 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-qjq94" event={"ID":"f517f423-af69-4a25-a169-e71268fa0ca3","Type":"ContainerStarted","Data":"82fbe90bb81f04e2d642affa7d47a55c499b429665096e8505e036d0d43adf1c"}
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.991047 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/750ceaf2-6038-448f-8141-1a2014d81e34-oauth-serving-cert\") pod \"console-5554869996-s6j48\" (UID: \"750ceaf2-6038-448f-8141-1a2014d81e34\") " pod="openshift-console/console-5554869996-s6j48"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.991302 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbx2t\" (UniqueName: \"kubernetes.io/projected/750ceaf2-6038-448f-8141-1a2014d81e34-kube-api-access-pbx2t\") pod \"console-5554869996-s6j48\" (UID: \"750ceaf2-6038-448f-8141-1a2014d81e34\") " pod="openshift-console/console-5554869996-s6j48"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.991381 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/750ceaf2-6038-448f-8141-1a2014d81e34-console-config\") pod \"console-5554869996-s6j48\" (UID: \"750ceaf2-6038-448f-8141-1a2014d81e34\") " pod="openshift-console/console-5554869996-s6j48"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.991401 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/750ceaf2-6038-448f-8141-1a2014d81e34-trusted-ca-bundle\") pod \"console-5554869996-s6j48\" (UID: \"750ceaf2-6038-448f-8141-1a2014d81e34\") " pod="openshift-console/console-5554869996-s6j48"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.991427 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/750ceaf2-6038-448f-8141-1a2014d81e34-service-ca\") pod \"console-5554869996-s6j48\" (UID: \"750ceaf2-6038-448f-8141-1a2014d81e34\") " pod="openshift-console/console-5554869996-s6j48"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.991447 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/750ceaf2-6038-448f-8141-1a2014d81e34-console-oauth-config\") pod \"console-5554869996-s6j48\" (UID: \"750ceaf2-6038-448f-8141-1a2014d81e34\") " pod="openshift-console/console-5554869996-s6j48"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.991489 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/750ceaf2-6038-448f-8141-1a2014d81e34-console-serving-cert\") pod \"console-5554869996-s6j48\" (UID: \"750ceaf2-6038-448f-8141-1a2014d81e34\") " pod="openshift-console/console-5554869996-s6j48"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.992032 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/750ceaf2-6038-448f-8141-1a2014d81e34-oauth-serving-cert\") pod \"console-5554869996-s6j48\" (UID: \"750ceaf2-6038-448f-8141-1a2014d81e34\") " pod="openshift-console/console-5554869996-s6j48"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.992400 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/750ceaf2-6038-448f-8141-1a2014d81e34-trusted-ca-bundle\") pod \"console-5554869996-s6j48\" (UID: \"750ceaf2-6038-448f-8141-1a2014d81e34\") " pod="openshift-console/console-5554869996-s6j48"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.992634 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/750ceaf2-6038-448f-8141-1a2014d81e34-console-config\") pod \"console-5554869996-s6j48\" (UID: \"750ceaf2-6038-448f-8141-1a2014d81e34\") " pod="openshift-console/console-5554869996-s6j48"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.992677 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/750ceaf2-6038-448f-8141-1a2014d81e34-service-ca\") pod \"console-5554869996-s6j48\" (UID: \"750ceaf2-6038-448f-8141-1a2014d81e34\") " pod="openshift-console/console-5554869996-s6j48"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.994965 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/750ceaf2-6038-448f-8141-1a2014d81e34-console-oauth-config\") pod \"console-5554869996-s6j48\" (UID: \"750ceaf2-6038-448f-8141-1a2014d81e34\") " pod="openshift-console/console-5554869996-s6j48"
Dec 01 06:52:20 crc kubenswrapper[4632]: I1201 06:52:20.995320 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/750ceaf2-6038-448f-8141-1a2014d81e34-console-serving-cert\") pod \"console-5554869996-s6j48\" (UID: \"750ceaf2-6038-448f-8141-1a2014d81e34\") " pod="openshift-console/console-5554869996-s6j48"
Dec 01 06:52:21 crc kubenswrapper[4632]: I1201 06:52:21.005991 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pbx2t\" (UniqueName: \"kubernetes.io/projected/750ceaf2-6038-448f-8141-1a2014d81e34-kube-api-access-pbx2t\") pod \"console-5554869996-s6j48\" (UID: \"750ceaf2-6038-448f-8141-1a2014d81e34\") " pod="openshift-console/console-5554869996-s6j48"
Dec 01 06:52:21 crc kubenswrapper[4632]: I1201 06:52:21.016857 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-m9dcd"
Dec 01 06:52:21 crc kubenswrapper[4632]: I1201 06:52:21.184275 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-5554869996-s6j48"
Dec 01 06:52:21 crc kubenswrapper[4632]: I1201 06:52:21.263917 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-ngl6v"]
Dec 01 06:52:21 crc kubenswrapper[4632]: W1201 06:52:21.273617 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b03b7ed_47ab_4ae0_95e7_ed1c830fe065.slice/crio-545c56c743dc981d1c2923ec128591a2ded40a1d470487ad0de6aeb4457ff346 WatchSource:0}: Error finding container 545c56c743dc981d1c2923ec128591a2ded40a1d470487ad0de6aeb4457ff346: Status 404 returned error can't find the container with id 545c56c743dc981d1c2923ec128591a2ded40a1d470487ad0de6aeb4457ff346
Dec 01 06:52:21 crc kubenswrapper[4632]: I1201 06:52:21.296052 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/70be3201-d4ba-4c07-950e-527ad7d2024d-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-hmmp7\" (UID: \"70be3201-d4ba-4c07-950e-527ad7d2024d\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hmmp7"
Dec 01 06:52:21 crc kubenswrapper[4632]: I1201 06:52:21.302375 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/70be3201-d4ba-4c07-950e-527ad7d2024d-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-hmmp7\" (UID: \"70be3201-d4ba-4c07-950e-527ad7d2024d\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hmmp7"
Dec 01 06:52:21 crc kubenswrapper[4632]: I1201 06:52:21.351658 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-m9dcd"]
Dec 01 06:52:21 crc kubenswrapper[4632]: W1201 06:52:21.354491 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6d66d298_9ddf_440c_ace2_14c38dc309b0.slice/crio-86376b4a9234de4af9da1c6d70390de6afaf664ea4bf648df5738c645dd30937 WatchSource:0}: Error finding container 86376b4a9234de4af9da1c6d70390de6afaf664ea4bf648df5738c645dd30937: Status 404 returned error can't find the container with id 86376b4a9234de4af9da1c6d70390de6afaf664ea4bf648df5738c645dd30937
Dec 01 06:52:21 crc kubenswrapper[4632]: I1201 06:52:21.522135 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hmmp7"
Dec 01 06:52:21 crc kubenswrapper[4632]: I1201 06:52:21.547829 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5554869996-s6j48"]
Dec 01 06:52:21 crc kubenswrapper[4632]: W1201 06:52:21.550726 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod750ceaf2_6038_448f_8141_1a2014d81e34.slice/crio-d57604c64732efe3ae2441f6949efae3bf886782f69306b35b8c9854b928a5d0 WatchSource:0}: Error finding container d57604c64732efe3ae2441f6949efae3bf886782f69306b35b8c9854b928a5d0: Status 404 returned error can't find the container with id d57604c64732efe3ae2441f6949efae3bf886782f69306b35b8c9854b928a5d0
Dec 01 06:52:21 crc kubenswrapper[4632]: I1201 06:52:21.862851 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hmmp7"]
Dec 01 06:52:21 crc kubenswrapper[4632]: W1201 06:52:21.868950 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod70be3201_d4ba_4c07_950e_527ad7d2024d.slice/crio-275eea75241c57018c518d32131740e646d8d188eccb315e08c12d99a8c03a47 WatchSource:0}: Error finding container 275eea75241c57018c518d32131740e646d8d188eccb315e08c12d99a8c03a47: Status 404 returned error can't find the container with id 275eea75241c57018c518d32131740e646d8d188eccb315e08c12d99a8c03a47
Dec 01 06:52:21 crc kubenswrapper[4632]: I1201 06:52:21.996444 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5554869996-s6j48" event={"ID":"750ceaf2-6038-448f-8141-1a2014d81e34","Type":"ContainerStarted","Data":"4ce18172f91602e59303bee0d4cd29132e8befc247d5d8fbc43add7506efc551"}
Dec 01 06:52:21 crc kubenswrapper[4632]: I1201 06:52:21.996569 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5554869996-s6j48" event={"ID":"750ceaf2-6038-448f-8141-1a2014d81e34","Type":"ContainerStarted","Data":"d57604c64732efe3ae2441f6949efae3bf886782f69306b35b8c9854b928a5d0"}
Dec 01 06:52:21 crc kubenswrapper[4632]: I1201 06:52:21.997244 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ngl6v" event={"ID":"3b03b7ed-47ab-4ae0-95e7-ed1c830fe065","Type":"ContainerStarted","Data":"545c56c743dc981d1c2923ec128591a2ded40a1d470487ad0de6aeb4457ff346"}
Dec 01 06:52:21 crc kubenswrapper[4632]: I1201 06:52:21.997978 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-m9dcd" event={"ID":"6d66d298-9ddf-440c-ace2-14c38dc309b0","Type":"ContainerStarted","Data":"86376b4a9234de4af9da1c6d70390de6afaf664ea4bf648df5738c645dd30937"}
Dec 01 06:52:21 crc kubenswrapper[4632]: I1201 06:52:21.998903 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hmmp7" event={"ID":"70be3201-d4ba-4c07-950e-527ad7d2024d","Type":"ContainerStarted","Data":"275eea75241c57018c518d32131740e646d8d188eccb315e08c12d99a8c03a47"}
Dec 01 06:52:22 crc kubenswrapper[4632]: I1201 06:52:22.010484 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-5554869996-s6j48" podStartSLOduration=2.010466321 podStartE2EDuration="2.010466321s" podCreationTimestamp="2025-12-01 06:52:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:52:22.008258624 +0000 UTC m=+551.573271597" watchObservedRunningTime="2025-12-01 06:52:22.010466321 +0000 UTC m=+551.575479294"
Dec 01 06:52:25 crc kubenswrapper[4632]: I1201 06:52:25.016067 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-qjq94" event={"ID":"f517f423-af69-4a25-a169-e71268fa0ca3","Type":"ContainerStarted","Data":"fa040fe07d82b07fd111be4909159742bb3f52951465e6ed5399edcccabb7c36"}
Dec 01 06:52:25 crc kubenswrapper[4632]: I1201 06:52:25.016124 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-qjq94"
Dec 01 06:52:25 crc kubenswrapper[4632]: I1201 06:52:25.017176 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hmmp7" event={"ID":"70be3201-d4ba-4c07-950e-527ad7d2024d","Type":"ContainerStarted","Data":"635d0bb4390ceef140eaabaf3730e9ef9061d71955e428b3b85ad7890fe888a1"}
Dec 01 06:52:25 crc kubenswrapper[4632]: I1201 06:52:25.017312 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hmmp7"
Dec 01 06:52:25 crc kubenswrapper[4632]: I1201 06:52:25.018724 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ngl6v" event={"ID":"3b03b7ed-47ab-4ae0-95e7-ed1c830fe065","Type":"ContainerStarted","Data":"36484f3a5cd0a548de8a78c369ecdd98cac344677d9037e90dbae79c642f0939"}
Dec 01 06:52:25 crc kubenswrapper[4632]: I1201 06:52:25.019811 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-m9dcd" event={"ID":"6d66d298-9ddf-440c-ace2-14c38dc309b0","Type":"ContainerStarted","Data":"7b107e9aada817afbaf90ceedd3fa31014fddcd8e9043e5d9bbea6209671179a"}
Dec 01 06:52:25 crc kubenswrapper[4632]: I1201 06:52:25.031774 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-qjq94" podStartSLOduration=2.073925935 podStartE2EDuration="5.031764943s" podCreationTimestamp="2025-12-01 06:52:20 +0000 UTC" firstStartedPulling="2025-12-01 06:52:20.957323523 +0000 UTC m=+550.522336496" lastFinishedPulling="2025-12-01 06:52:23.915162531 +0000 UTC m=+553.480175504" observedRunningTime="2025-12-01 06:52:25.027900509 +0000 UTC m=+554.592913482" watchObservedRunningTime="2025-12-01 06:52:25.031764943 +0000 UTC m=+554.596777915"
Dec 01 06:52:25 crc kubenswrapper[4632]: I1201 06:52:25.038612 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-m9dcd" podStartSLOduration=2.487493278 podStartE2EDuration="5.038599991s" podCreationTimestamp="2025-12-01 06:52:20 +0000 UTC" firstStartedPulling="2025-12-01 06:52:21.35669175 +0000 UTC m=+550.921704724" lastFinishedPulling="2025-12-01 06:52:23.907798464 +0000 UTC m=+553.472811437" observedRunningTime="2025-12-01 06:52:25.037758834 +0000 UTC m=+554.602771807" watchObservedRunningTime="2025-12-01 06:52:25.038599991 +0000 UTC m=+554.603612965"
Dec 01 06:52:25 crc kubenswrapper[4632]: I1201 06:52:25.053735 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hmmp7" podStartSLOduration=3.017265298 podStartE2EDuration="5.053719436s" podCreationTimestamp="2025-12-01 06:52:20 +0000 UTC" firstStartedPulling="2025-12-01 06:52:21.870683659 +0000 UTC m=+551.435696632" lastFinishedPulling="2025-12-01 06:52:23.907137798 +0000 UTC m=+553.472150770" observedRunningTime="2025-12-01 06:52:25.052096883 +0000 UTC m=+554.617109857" watchObservedRunningTime="2025-12-01 06:52:25.053719436 +0000 UTC m=+554.618732409"
Dec 01 06:52:27 crc kubenswrapper[4632]: I1201 06:52:27.032828 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ngl6v" event={"ID":"3b03b7ed-47ab-4ae0-95e7-ed1c830fe065","Type":"ContainerStarted","Data":"e0660f59bd7bd4ad78d761e22a9d18dac25b8698bf2313fc30e97e3d6c199cc4"}
Dec 01 06:52:27 crc kubenswrapper[4632]: I1201 06:52:27.046782 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ngl6v" podStartSLOduration=2.320717976 podStartE2EDuration="7.046766685s" podCreationTimestamp="2025-12-01 06:52:20 +0000 UTC" firstStartedPulling="2025-12-01 06:52:21.276616781 +0000 UTC m=+550.841629754" lastFinishedPulling="2025-12-01 06:52:26.002665491 +0000 UTC m=+555.567678463" observedRunningTime="2025-12-01 06:52:27.043630597 +0000 UTC m=+556.608643570" watchObservedRunningTime="2025-12-01 06:52:27.046766685 +0000 UTC m=+556.611779658"
Dec 01 06:52:30 crc kubenswrapper[4632]: I1201 06:52:30.953425 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-qjq94"
Dec 01 06:52:31 crc kubenswrapper[4632]: I1201 06:52:31.184564 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-5554869996-s6j48"
Dec 01 06:52:31 crc kubenswrapper[4632]: I1201 06:52:31.184631 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-5554869996-s6j48"
Dec 01 06:52:31 crc kubenswrapper[4632]: I1201 06:52:31.188434 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-5554869996-s6j48"
Dec 01 06:52:32 crc kubenswrapper[4632]: I1201 06:52:32.056662 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-5554869996-s6j48"
Dec 01 06:52:32 crc kubenswrapper[4632]: I1201 06:52:32.090134 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-52x8r"]
Dec 01 06:52:41 crc kubenswrapper[4632]: I1201 06:52:41.527813 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-hmmp7"
Dec 01 06:52:49 crc kubenswrapper[4632]: I1201 06:52:49.498186 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 06:52:49 crc kubenswrapper[4632]: I1201 06:52:49.498630 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 06:52:49 crc kubenswrapper[4632]: I1201 06:52:49.498668 4632 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs"
Dec 01 06:52:49 crc kubenswrapper[4632]: I1201 06:52:49.499030 4632 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ee7f539e03195ed0ab8d1a700f3bda3f809d44f758cd7a5f013919000dd76f35"} pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 01 06:52:49 crc kubenswrapper[4632]: I1201 06:52:49.499095 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" containerID="cri-o://ee7f539e03195ed0ab8d1a700f3bda3f809d44f758cd7a5f013919000dd76f35" gracePeriod=600
Dec 01 06:52:50 crc kubenswrapper[4632]: I1201 06:52:50.141518 4632 generic.go:334] "Generic (PLEG): container finished" podID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerID="ee7f539e03195ed0ab8d1a700f3bda3f809d44f758cd7a5f013919000dd76f35" exitCode=0
Dec 01 06:52:50 crc kubenswrapper[4632]: I1201 06:52:50.141573 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerDied","Data":"ee7f539e03195ed0ab8d1a700f3bda3f809d44f758cd7a5f013919000dd76f35"}
Dec 01 06:52:50 crc kubenswrapper[4632]: I1201 06:52:50.142932 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerStarted","Data":"e2c37d74168c2266d0e222524d08e21b2b2ded4712e89e7f6ccc10e73c3e4637"}
Dec 01 06:52:50 crc kubenswrapper[4632]: I1201 06:52:50.142983 4632 scope.go:117] "RemoveContainer" containerID="b2e0f53c88b4c1d159446108cc543bcd91eef18bfb8a923bef5d472620c42a5a"
Dec 01 06:52:50 crc kubenswrapper[4632]: I1201 06:52:50.437278 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt"]
Dec 01 06:52:50 crc kubenswrapper[4632]: I1201 06:52:50.438404 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt"
Dec 01 06:52:50 crc kubenswrapper[4632]: I1201 06:52:50.440306 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Dec 01 06:52:50 crc kubenswrapper[4632]: I1201 06:52:50.443732 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt"]
Dec 01 06:52:50 crc kubenswrapper[4632]: I1201 06:52:50.536256 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42ds6\" (UniqueName: \"kubernetes.io/projected/f5f12f82-cb0c-4db9-bb02-4da44d980004-kube-api-access-42ds6\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt\" (UID: \"f5f12f82-cb0c-4db9-bb02-4da44d980004\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt"
Dec 01 06:52:50 crc kubenswrapper[4632]: I1201 06:52:50.536332 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f5f12f82-cb0c-4db9-bb02-4da44d980004-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt\" (UID: \"f5f12f82-cb0c-4db9-bb02-4da44d980004\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt"
Dec 01 06:52:50 crc kubenswrapper[4632]: I1201 06:52:50.536382 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f5f12f82-cb0c-4db9-bb02-4da44d980004-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt\" (UID: \"f5f12f82-cb0c-4db9-bb02-4da44d980004\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt"
Dec 01 06:52:50 crc kubenswrapper[4632]: I1201 06:52:50.637752 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42ds6\" (UniqueName: \"kubernetes.io/projected/f5f12f82-cb0c-4db9-bb02-4da44d980004-kube-api-access-42ds6\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt\" (UID: \"f5f12f82-cb0c-4db9-bb02-4da44d980004\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt"
Dec 01 06:52:50 crc kubenswrapper[4632]: I1201 06:52:50.637812 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f5f12f82-cb0c-4db9-bb02-4da44d980004-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt\" (UID: \"f5f12f82-cb0c-4db9-bb02-4da44d980004\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt"
Dec 01 06:52:50 crc kubenswrapper[4632]: I1201 06:52:50.637832 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f5f12f82-cb0c-4db9-bb02-4da44d980004-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt\" (UID: \"f5f12f82-cb0c-4db9-bb02-4da44d980004\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt"
Dec 01 06:52:50 crc kubenswrapper[4632]: I1201 06:52:50.638295 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f5f12f82-cb0c-4db9-bb02-4da44d980004-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt\" (UID: \"f5f12f82-cb0c-4db9-bb02-4da44d980004\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt"
Dec 01 06:52:50 crc kubenswrapper[4632]: I1201 06:52:50.638311 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f5f12f82-cb0c-4db9-bb02-4da44d980004-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt\" (UID: \"f5f12f82-cb0c-4db9-bb02-4da44d980004\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt"
Dec 01 06:52:50 crc kubenswrapper[4632]: I1201 06:52:50.652334 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42ds6\" (UniqueName: \"kubernetes.io/projected/f5f12f82-cb0c-4db9-bb02-4da44d980004-kube-api-access-42ds6\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt\" (UID: \"f5f12f82-cb0c-4db9-bb02-4da44d980004\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt"
Dec 01 06:52:50 crc kubenswrapper[4632]: I1201 06:52:50.751444 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt"
Dec 01 06:52:51 crc kubenswrapper[4632]: I1201 06:52:51.077771 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt"]
Dec 01 06:52:51 crc kubenswrapper[4632]: W1201 06:52:51.084149 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf5f12f82_cb0c_4db9_bb02_4da44d980004.slice/crio-474ec84fc2227976c257ddbe42c1ab063e31007d0a9fe56d55b456ffcc38c374 WatchSource:0}: Error finding container 474ec84fc2227976c257ddbe42c1ab063e31007d0a9fe56d55b456ffcc38c374: Status 404 returned error can't find the container with id 474ec84fc2227976c257ddbe42c1ab063e31007d0a9fe56d55b456ffcc38c374
Dec 01 06:52:51 crc kubenswrapper[4632]: I1201 06:52:51.147857 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt" event={"ID":"f5f12f82-cb0c-4db9-bb02-4da44d980004","Type":"ContainerStarted","Data":"474ec84fc2227976c257ddbe42c1ab063e31007d0a9fe56d55b456ffcc38c374"}
Dec 01 06:52:52 crc kubenswrapper[4632]: I1201 06:52:52.155089 4632 generic.go:334] "Generic (PLEG): container finished" podID="f5f12f82-cb0c-4db9-bb02-4da44d980004" containerID="7d48c103b57d468f7ee7ba0edc6788f770c7c340c256670c00b1799a9bdb68d6" exitCode=0
Dec 01 06:52:52 crc kubenswrapper[4632]: I1201 06:52:52.155165 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt" event={"ID":"f5f12f82-cb0c-4db9-bb02-4da44d980004","Type":"ContainerDied","Data":"7d48c103b57d468f7ee7ba0edc6788f770c7c340c256670c00b1799a9bdb68d6"}
Dec 01 06:52:54 crc kubenswrapper[4632]: I1201 06:52:54.164604 4632 generic.go:334] "Generic (PLEG): container finished" podID="f5f12f82-cb0c-4db9-bb02-4da44d980004" containerID="898a68432de6e25be188a106c94484753b4536a33c8ac9dc17eb91e54e857384" exitCode=0
Dec 01 06:52:54 crc kubenswrapper[4632]: I1201 06:52:54.164683 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt" event={"ID":"f5f12f82-cb0c-4db9-bb02-4da44d980004","Type":"ContainerDied","Data":"898a68432de6e25be188a106c94484753b4536a33c8ac9dc17eb91e54e857384"}
Dec 01 06:52:55 crc kubenswrapper[4632]: I1201 06:52:55.170116 4632 generic.go:334] "Generic (PLEG): container finished" podID="f5f12f82-cb0c-4db9-bb02-4da44d980004" containerID="36691f448beefc61f97c060da29219cbaae727430f6c4ddb2282d05049add901" exitCode=0
Dec 01 06:52:55 crc kubenswrapper[4632]: I1201 06:52:55.170145 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt" event={"ID":"f5f12f82-cb0c-4db9-bb02-4da44d980004","Type":"ContainerDied","Data":"36691f448beefc61f97c060da29219cbaae727430f6c4ddb2282d05049add901"}
Dec 01 06:52:56 crc kubenswrapper[4632]: I1201 06:52:56.350149 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt"
Dec 01 06:52:56 crc kubenswrapper[4632]: I1201 06:52:56.397610 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f5f12f82-cb0c-4db9-bb02-4da44d980004-bundle\") pod \"f5f12f82-cb0c-4db9-bb02-4da44d980004\" (UID: \"f5f12f82-cb0c-4db9-bb02-4da44d980004\") "
Dec 01 06:52:56 crc kubenswrapper[4632]: I1201 06:52:56.397706 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-42ds6\" (UniqueName: \"kubernetes.io/projected/f5f12f82-cb0c-4db9-bb02-4da44d980004-kube-api-access-42ds6\") pod \"f5f12f82-cb0c-4db9-bb02-4da44d980004\" (UID: \"f5f12f82-cb0c-4db9-bb02-4da44d980004\") "
Dec 01 06:52:56 crc kubenswrapper[4632]: I1201 06:52:56.397748 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f5f12f82-cb0c-4db9-bb02-4da44d980004-util\") pod \"f5f12f82-cb0c-4db9-bb02-4da44d980004\" (UID: \"f5f12f82-cb0c-4db9-bb02-4da44d980004\") "
Dec 01 06:52:56 crc kubenswrapper[4632]: I1201 06:52:56.398516 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5f12f82-cb0c-4db9-bb02-4da44d980004-bundle" (OuterVolumeSpecName: "bundle") pod "f5f12f82-cb0c-4db9-bb02-4da44d980004" (UID: "f5f12f82-cb0c-4db9-bb02-4da44d980004"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 06:52:56 crc kubenswrapper[4632]: I1201 06:52:56.402615 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5f12f82-cb0c-4db9-bb02-4da44d980004-kube-api-access-42ds6" (OuterVolumeSpecName: "kube-api-access-42ds6") pod "f5f12f82-cb0c-4db9-bb02-4da44d980004" (UID: "f5f12f82-cb0c-4db9-bb02-4da44d980004"). InnerVolumeSpecName "kube-api-access-42ds6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:52:56 crc kubenswrapper[4632]: I1201 06:52:56.486663 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5f12f82-cb0c-4db9-bb02-4da44d980004-util" (OuterVolumeSpecName: "util") pod "f5f12f82-cb0c-4db9-bb02-4da44d980004" (UID: "f5f12f82-cb0c-4db9-bb02-4da44d980004"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 06:52:56 crc kubenswrapper[4632]: I1201 06:52:56.499295 4632 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f5f12f82-cb0c-4db9-bb02-4da44d980004-util\") on node \"crc\" DevicePath \"\""
Dec 01 06:52:56 crc kubenswrapper[4632]: I1201 06:52:56.499317 4632 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f5f12f82-cb0c-4db9-bb02-4da44d980004-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 06:52:56 crc kubenswrapper[4632]: I1201 06:52:56.499326 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-42ds6\" (UniqueName: \"kubernetes.io/projected/f5f12f82-cb0c-4db9-bb02-4da44d980004-kube-api-access-42ds6\") on node \"crc\" DevicePath \"\""
Dec 01 06:52:57 crc kubenswrapper[4632]: I1201 06:52:57.118404 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-52x8r" podUID="ba580937-8b3b-404d-a9fe-3d4e014ce6b1" containerName="console" containerID="cri-o://2852c63177091ff230279b0962afb0b1f1897857744826de9d5634a183defc74" gracePeriod=15
Dec 01 06:52:57 crc kubenswrapper[4632]: I1201 06:52:57.180267 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt" event={"ID":"f5f12f82-cb0c-4db9-bb02-4da44d980004","Type":"ContainerDied","Data":"474ec84fc2227976c257ddbe42c1ab063e31007d0a9fe56d55b456ffcc38c374"}
Dec 01 06:52:57 crc kubenswrapper[4632]: I1201 06:52:57.180299 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="474ec84fc2227976c257ddbe42c1ab063e31007d0a9fe56d55b456ffcc38c374"
Dec 01 06:52:57 crc kubenswrapper[4632]: I1201 06:52:57.180314 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt"
Dec 01 06:52:57 crc kubenswrapper[4632]: I1201 06:52:57.393177 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-52x8r_ba580937-8b3b-404d-a9fe-3d4e014ce6b1/console/0.log"
Dec 01 06:52:57 crc kubenswrapper[4632]: I1201 06:52:57.393233 4632 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-console/console-f9d7485db-52x8r" Dec 01 06:52:57 crc kubenswrapper[4632]: I1201 06:52:57.507803 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-trusted-ca-bundle\") pod \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " Dec 01 06:52:57 crc kubenswrapper[4632]: I1201 06:52:57.507842 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzfzr\" (UniqueName: \"kubernetes.io/projected/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-kube-api-access-xzfzr\") pod \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " Dec 01 06:52:57 crc kubenswrapper[4632]: I1201 06:52:57.507885 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-oauth-serving-cert\") pod \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " Dec 01 06:52:57 crc kubenswrapper[4632]: I1201 06:52:57.507914 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-console-oauth-config\") pod \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " Dec 01 06:52:57 crc kubenswrapper[4632]: I1201 06:52:57.507937 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-console-config\") pod \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " Dec 01 06:52:57 crc kubenswrapper[4632]: I1201 06:52:57.507968 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-console-serving-cert\") pod \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " Dec 01 06:52:57 crc kubenswrapper[4632]: I1201 06:52:57.507985 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-service-ca\") pod \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\" (UID: \"ba580937-8b3b-404d-a9fe-3d4e014ce6b1\") " Dec 01 06:52:57 crc kubenswrapper[4632]: I1201 06:52:57.508532 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "ba580937-8b3b-404d-a9fe-3d4e014ce6b1" (UID: "ba580937-8b3b-404d-a9fe-3d4e014ce6b1"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:52:57 crc kubenswrapper[4632]: I1201 06:52:57.508540 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-service-ca" (OuterVolumeSpecName: "service-ca") pod "ba580937-8b3b-404d-a9fe-3d4e014ce6b1" (UID: "ba580937-8b3b-404d-a9fe-3d4e014ce6b1"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:52:57 crc kubenswrapper[4632]: I1201 06:52:57.508557 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "ba580937-8b3b-404d-a9fe-3d4e014ce6b1" (UID: "ba580937-8b3b-404d-a9fe-3d4e014ce6b1"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:52:57 crc kubenswrapper[4632]: I1201 06:52:57.508878 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-console-config" (OuterVolumeSpecName: "console-config") pod "ba580937-8b3b-404d-a9fe-3d4e014ce6b1" (UID: "ba580937-8b3b-404d-a9fe-3d4e014ce6b1"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:52:57 crc kubenswrapper[4632]: I1201 06:52:57.512316 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "ba580937-8b3b-404d-a9fe-3d4e014ce6b1" (UID: "ba580937-8b3b-404d-a9fe-3d4e014ce6b1"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:52:57 crc kubenswrapper[4632]: I1201 06:52:57.512576 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-kube-api-access-xzfzr" (OuterVolumeSpecName: "kube-api-access-xzfzr") pod "ba580937-8b3b-404d-a9fe-3d4e014ce6b1" (UID: "ba580937-8b3b-404d-a9fe-3d4e014ce6b1"). InnerVolumeSpecName "kube-api-access-xzfzr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:52:57 crc kubenswrapper[4632]: I1201 06:52:57.513018 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "ba580937-8b3b-404d-a9fe-3d4e014ce6b1" (UID: "ba580937-8b3b-404d-a9fe-3d4e014ce6b1"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:52:57 crc kubenswrapper[4632]: I1201 06:52:57.608701 4632 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-service-ca\") on node \"crc\" DevicePath \"\"" Dec 01 06:52:57 crc kubenswrapper[4632]: I1201 06:52:57.608736 4632 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:52:57 crc kubenswrapper[4632]: I1201 06:52:57.608748 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzfzr\" (UniqueName: \"kubernetes.io/projected/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-kube-api-access-xzfzr\") on node \"crc\" DevicePath \"\"" Dec 01 06:52:57 crc kubenswrapper[4632]: I1201 06:52:57.608756 4632 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:52:57 crc kubenswrapper[4632]: I1201 06:52:57.608764 4632 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:52:57 crc kubenswrapper[4632]: I1201 06:52:57.608771 4632 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-console-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:52:57 crc kubenswrapper[4632]: I1201 06:52:57.608778 4632 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/ba580937-8b3b-404d-a9fe-3d4e014ce6b1-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 01 06:52:58 crc kubenswrapper[4632]: I1201 06:52:58.197196 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-52x8r_ba580937-8b3b-404d-a9fe-3d4e014ce6b1/console/0.log" Dec 01 06:52:58 crc kubenswrapper[4632]: I1201 06:52:58.197251 4632 generic.go:334] "Generic (PLEG): container finished" podID="ba580937-8b3b-404d-a9fe-3d4e014ce6b1" containerID="2852c63177091ff230279b0962afb0b1f1897857744826de9d5634a183defc74" exitCode=2 Dec 01 06:52:58 crc kubenswrapper[4632]: I1201 06:52:58.197279 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-52x8r" event={"ID":"ba580937-8b3b-404d-a9fe-3d4e014ce6b1","Type":"ContainerDied","Data":"2852c63177091ff230279b0962afb0b1f1897857744826de9d5634a183defc74"} Dec 01 06:52:58 crc kubenswrapper[4632]: I1201 06:52:58.197303 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-52x8r" event={"ID":"ba580937-8b3b-404d-a9fe-3d4e014ce6b1","Type":"ContainerDied","Data":"6307f6d97bd31ea884598fa1055cd83026aa404ea824f22d1edc47c85fca5ebf"} Dec 01 06:52:58 crc kubenswrapper[4632]: I1201 06:52:58.197320 4632 scope.go:117] "RemoveContainer" containerID="2852c63177091ff230279b0962afb0b1f1897857744826de9d5634a183defc74" Dec 01 06:52:58 crc kubenswrapper[4632]: I1201 06:52:58.197484 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-52x8r" Dec 01 06:52:58 crc kubenswrapper[4632]: I1201 06:52:58.216140 4632 scope.go:117] "RemoveContainer" containerID="2852c63177091ff230279b0962afb0b1f1897857744826de9d5634a183defc74" Dec 01 06:52:58 crc kubenswrapper[4632]: E1201 06:52:58.216697 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2852c63177091ff230279b0962afb0b1f1897857744826de9d5634a183defc74\": container with ID starting with 2852c63177091ff230279b0962afb0b1f1897857744826de9d5634a183defc74 not found: ID does not exist" containerID="2852c63177091ff230279b0962afb0b1f1897857744826de9d5634a183defc74" Dec 01 06:52:58 crc kubenswrapper[4632]: I1201 06:52:58.216739 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2852c63177091ff230279b0962afb0b1f1897857744826de9d5634a183defc74"} err="failed to get container status \"2852c63177091ff230279b0962afb0b1f1897857744826de9d5634a183defc74\": rpc error: code = NotFound desc = could not find container \"2852c63177091ff230279b0962afb0b1f1897857744826de9d5634a183defc74\": container with ID starting with 2852c63177091ff230279b0962afb0b1f1897857744826de9d5634a183defc74 not found: ID does not exist" Dec 01 06:52:58 crc kubenswrapper[4632]: I1201 06:52:58.219077 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-52x8r"] Dec 01 06:52:58 crc kubenswrapper[4632]: I1201 06:52:58.223077 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-52x8r"] Dec 01 06:52:58 crc kubenswrapper[4632]: I1201 06:52:58.755598 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba580937-8b3b-404d-a9fe-3d4e014ce6b1" path="/var/lib/kubelet/pods/ba580937-8b3b-404d-a9fe-3d4e014ce6b1/volumes" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.645034 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-5cdb7cf54d-h7wfl"] Dec 01 06:53:07 crc kubenswrapper[4632]: E1201 06:53:07.645770 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5f12f82-cb0c-4db9-bb02-4da44d980004" containerName="pull" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.645783 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5f12f82-cb0c-4db9-bb02-4da44d980004" containerName="pull" Dec 01 06:53:07 crc kubenswrapper[4632]: E1201 06:53:07.645799 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5f12f82-cb0c-4db9-bb02-4da44d980004" containerName="extract" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.645804 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5f12f82-cb0c-4db9-bb02-4da44d980004" containerName="extract" Dec 01 06:53:07 crc kubenswrapper[4632]: E1201 06:53:07.645814 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5f12f82-cb0c-4db9-bb02-4da44d980004" containerName="util" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.645819 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5f12f82-cb0c-4db9-bb02-4da44d980004" containerName="util" Dec 01 06:53:07 crc kubenswrapper[4632]: E1201 06:53:07.645825 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba580937-8b3b-404d-a9fe-3d4e014ce6b1" containerName="console" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.645830 4632 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="ba580937-8b3b-404d-a9fe-3d4e014ce6b1" containerName="console" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.645910 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5f12f82-cb0c-4db9-bb02-4da44d980004" containerName="extract" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.645920 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba580937-8b3b-404d-a9fe-3d4e014ce6b1" containerName="console" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.646227 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5cdb7cf54d-h7wfl" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.648166 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.648329 4632 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-mrg92" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.651498 4632 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.667879 4632 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.670613 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.676227 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5cdb7cf54d-h7wfl"] Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.725916 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4cceec72-bb6e-43a4-8b98-8077e45f281c-apiservice-cert\") pod \"metallb-operator-controller-manager-5cdb7cf54d-h7wfl\" (UID: \"4cceec72-bb6e-43a4-8b98-8077e45f281c\") " pod="metallb-system/metallb-operator-controller-manager-5cdb7cf54d-h7wfl" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.725967 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4cceec72-bb6e-43a4-8b98-8077e45f281c-webhook-cert\") pod \"metallb-operator-controller-manager-5cdb7cf54d-h7wfl\" (UID: \"4cceec72-bb6e-43a4-8b98-8077e45f281c\") " pod="metallb-system/metallb-operator-controller-manager-5cdb7cf54d-h7wfl" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.726029 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42srz\" (UniqueName: \"kubernetes.io/projected/4cceec72-bb6e-43a4-8b98-8077e45f281c-kube-api-access-42srz\") pod \"metallb-operator-controller-manager-5cdb7cf54d-h7wfl\" (UID: \"4cceec72-bb6e-43a4-8b98-8077e45f281c\") " pod="metallb-system/metallb-operator-controller-manager-5cdb7cf54d-h7wfl" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.827493 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4cceec72-bb6e-43a4-8b98-8077e45f281c-apiservice-cert\") pod \"metallb-operator-controller-manager-5cdb7cf54d-h7wfl\" (UID: \"4cceec72-bb6e-43a4-8b98-8077e45f281c\") " 
pod="metallb-system/metallb-operator-controller-manager-5cdb7cf54d-h7wfl" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.827541 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4cceec72-bb6e-43a4-8b98-8077e45f281c-webhook-cert\") pod \"metallb-operator-controller-manager-5cdb7cf54d-h7wfl\" (UID: \"4cceec72-bb6e-43a4-8b98-8077e45f281c\") " pod="metallb-system/metallb-operator-controller-manager-5cdb7cf54d-h7wfl" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.827567 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42srz\" (UniqueName: \"kubernetes.io/projected/4cceec72-bb6e-43a4-8b98-8077e45f281c-kube-api-access-42srz\") pod \"metallb-operator-controller-manager-5cdb7cf54d-h7wfl\" (UID: \"4cceec72-bb6e-43a4-8b98-8077e45f281c\") " pod="metallb-system/metallb-operator-controller-manager-5cdb7cf54d-h7wfl" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.834744 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4cceec72-bb6e-43a4-8b98-8077e45f281c-apiservice-cert\") pod \"metallb-operator-controller-manager-5cdb7cf54d-h7wfl\" (UID: \"4cceec72-bb6e-43a4-8b98-8077e45f281c\") " pod="metallb-system/metallb-operator-controller-manager-5cdb7cf54d-h7wfl" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.834744 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4cceec72-bb6e-43a4-8b98-8077e45f281c-webhook-cert\") pod \"metallb-operator-controller-manager-5cdb7cf54d-h7wfl\" (UID: \"4cceec72-bb6e-43a4-8b98-8077e45f281c\") " pod="metallb-system/metallb-operator-controller-manager-5cdb7cf54d-h7wfl" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.846066 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42srz\" (UniqueName: \"kubernetes.io/projected/4cceec72-bb6e-43a4-8b98-8077e45f281c-kube-api-access-42srz\") pod \"metallb-operator-controller-manager-5cdb7cf54d-h7wfl\" (UID: \"4cceec72-bb6e-43a4-8b98-8077e45f281c\") " pod="metallb-system/metallb-operator-controller-manager-5cdb7cf54d-h7wfl" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.974722 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-696b4c485-xvdlk"] Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.975601 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-696b4c485-xvdlk" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.977334 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-5cdb7cf54d-h7wfl" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.977649 4632 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.977722 4632 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.979427 4632 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-5wgp2" Dec 01 06:53:07 crc kubenswrapper[4632]: I1201 06:53:07.987739 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-696b4c485-xvdlk"] Dec 01 06:53:08 crc kubenswrapper[4632]: I1201 06:53:08.029915 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtmdb\" (UniqueName: \"kubernetes.io/projected/c28a59da-1614-46fc-9ece-a4c888e9c53c-kube-api-access-qtmdb\") pod \"metallb-operator-webhook-server-696b4c485-xvdlk\" (UID: \"c28a59da-1614-46fc-9ece-a4c888e9c53c\") " pod="metallb-system/metallb-operator-webhook-server-696b4c485-xvdlk" Dec 01 06:53:08 crc kubenswrapper[4632]: I1201 06:53:08.030158 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c28a59da-1614-46fc-9ece-a4c888e9c53c-apiservice-cert\") pod \"metallb-operator-webhook-server-696b4c485-xvdlk\" (UID: \"c28a59da-1614-46fc-9ece-a4c888e9c53c\") " pod="metallb-system/metallb-operator-webhook-server-696b4c485-xvdlk" Dec 01 06:53:08 crc kubenswrapper[4632]: I1201 06:53:08.030338 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c28a59da-1614-46fc-9ece-a4c888e9c53c-webhook-cert\") pod \"metallb-operator-webhook-server-696b4c485-xvdlk\" (UID: \"c28a59da-1614-46fc-9ece-a4c888e9c53c\") " pod="metallb-system/metallb-operator-webhook-server-696b4c485-xvdlk" Dec 01 06:53:08 crc kubenswrapper[4632]: I1201 06:53:08.132811 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c28a59da-1614-46fc-9ece-a4c888e9c53c-apiservice-cert\") pod \"metallb-operator-webhook-server-696b4c485-xvdlk\" (UID: \"c28a59da-1614-46fc-9ece-a4c888e9c53c\") " pod="metallb-system/metallb-operator-webhook-server-696b4c485-xvdlk" Dec 01 06:53:08 crc kubenswrapper[4632]: I1201 06:53:08.132864 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c28a59da-1614-46fc-9ece-a4c888e9c53c-webhook-cert\") pod \"metallb-operator-webhook-server-696b4c485-xvdlk\" (UID: \"c28a59da-1614-46fc-9ece-a4c888e9c53c\") " pod="metallb-system/metallb-operator-webhook-server-696b4c485-xvdlk" Dec 01 06:53:08 crc kubenswrapper[4632]: I1201 06:53:08.132911 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtmdb\" (UniqueName: \"kubernetes.io/projected/c28a59da-1614-46fc-9ece-a4c888e9c53c-kube-api-access-qtmdb\") pod \"metallb-operator-webhook-server-696b4c485-xvdlk\" (UID: \"c28a59da-1614-46fc-9ece-a4c888e9c53c\") " pod="metallb-system/metallb-operator-webhook-server-696b4c485-xvdlk" Dec 01 06:53:08 crc kubenswrapper[4632]: I1201 06:53:08.136907 
4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c28a59da-1614-46fc-9ece-a4c888e9c53c-apiservice-cert\") pod \"metallb-operator-webhook-server-696b4c485-xvdlk\" (UID: \"c28a59da-1614-46fc-9ece-a4c888e9c53c\") " pod="metallb-system/metallb-operator-webhook-server-696b4c485-xvdlk" Dec 01 06:53:08 crc kubenswrapper[4632]: I1201 06:53:08.137631 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c28a59da-1614-46fc-9ece-a4c888e9c53c-webhook-cert\") pod \"metallb-operator-webhook-server-696b4c485-xvdlk\" (UID: \"c28a59da-1614-46fc-9ece-a4c888e9c53c\") " pod="metallb-system/metallb-operator-webhook-server-696b4c485-xvdlk" Dec 01 06:53:08 crc kubenswrapper[4632]: I1201 06:53:08.148798 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtmdb\" (UniqueName: \"kubernetes.io/projected/c28a59da-1614-46fc-9ece-a4c888e9c53c-kube-api-access-qtmdb\") pod \"metallb-operator-webhook-server-696b4c485-xvdlk\" (UID: \"c28a59da-1614-46fc-9ece-a4c888e9c53c\") " pod="metallb-system/metallb-operator-webhook-server-696b4c485-xvdlk" Dec 01 06:53:08 crc kubenswrapper[4632]: I1201 06:53:08.325780 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-696b4c485-xvdlk" Dec 01 06:53:08 crc kubenswrapper[4632]: I1201 06:53:08.366040 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-5cdb7cf54d-h7wfl"] Dec 01 06:53:08 crc kubenswrapper[4632]: I1201 06:53:08.717162 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-696b4c485-xvdlk"] Dec 01 06:53:08 crc kubenswrapper[4632]: W1201 06:53:08.724405 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc28a59da_1614_46fc_9ece_a4c888e9c53c.slice/crio-f4608915586b456bcd7c0e7b054193fdd43408bf5c3280df46a526acad46fb6a WatchSource:0}: Error finding container f4608915586b456bcd7c0e7b054193fdd43408bf5c3280df46a526acad46fb6a: Status 404 returned error can't find the container with id f4608915586b456bcd7c0e7b054193fdd43408bf5c3280df46a526acad46fb6a Dec 01 06:53:09 crc kubenswrapper[4632]: I1201 06:53:09.248906 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5cdb7cf54d-h7wfl" event={"ID":"4cceec72-bb6e-43a4-8b98-8077e45f281c","Type":"ContainerStarted","Data":"5c89c4f76c871f7688a02c595709587d7fe47976639034f9735fa357dfcdf840"} Dec 01 06:53:09 crc kubenswrapper[4632]: I1201 06:53:09.249848 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-696b4c485-xvdlk" event={"ID":"c28a59da-1614-46fc-9ece-a4c888e9c53c","Type":"ContainerStarted","Data":"f4608915586b456bcd7c0e7b054193fdd43408bf5c3280df46a526acad46fb6a"} Dec 01 06:53:11 crc kubenswrapper[4632]: I1201 06:53:11.260572 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-5cdb7cf54d-h7wfl" event={"ID":"4cceec72-bb6e-43a4-8b98-8077e45f281c","Type":"ContainerStarted","Data":"2466529b7dbfff00ac79c6de2020878656ed09aa12aa85c3037146bc468e48c5"} Dec 01 06:53:11 crc kubenswrapper[4632]: I1201 06:53:11.261125 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="metallb-system/metallb-operator-controller-manager-5cdb7cf54d-h7wfl" Dec 01 06:53:11 crc kubenswrapper[4632]: I1201 06:53:11.277057 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-5cdb7cf54d-h7wfl" podStartSLOduration=1.6133917279999999 podStartE2EDuration="4.277042769s" podCreationTimestamp="2025-12-01 06:53:07 +0000 UTC" firstStartedPulling="2025-12-01 06:53:08.382984788 +0000 UTC m=+597.947997761" lastFinishedPulling="2025-12-01 06:53:11.046635839 +0000 UTC m=+600.611648802" observedRunningTime="2025-12-01 06:53:11.27386354 +0000 UTC m=+600.838876513" watchObservedRunningTime="2025-12-01 06:53:11.277042769 +0000 UTC m=+600.842055742" Dec 01 06:53:13 crc kubenswrapper[4632]: I1201 06:53:13.272292 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-696b4c485-xvdlk" event={"ID":"c28a59da-1614-46fc-9ece-a4c888e9c53c","Type":"ContainerStarted","Data":"8142e130fb914e645d589e29b053e1ebef16241a54a200125bc99867718df55e"} Dec 01 06:53:13 crc kubenswrapper[4632]: I1201 06:53:13.273396 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-696b4c485-xvdlk" Dec 01 06:53:13 crc kubenswrapper[4632]: I1201 06:53:13.291328 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-696b4c485-xvdlk" podStartSLOduration=2.773677964 podStartE2EDuration="6.291302071s" podCreationTimestamp="2025-12-01 06:53:07 +0000 UTC" firstStartedPulling="2025-12-01 06:53:08.727537319 +0000 UTC m=+598.292550291" lastFinishedPulling="2025-12-01 06:53:12.245161424 +0000 UTC m=+601.810174398" observedRunningTime="2025-12-01 06:53:13.286313127 +0000 UTC m=+602.851326100" watchObservedRunningTime="2025-12-01 06:53:13.291302071 +0000 UTC m=+602.856315035" Dec 01 06:53:28 crc kubenswrapper[4632]: I1201 06:53:28.330092 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-696b4c485-xvdlk" Dec 01 06:53:47 crc kubenswrapper[4632]: I1201 06:53:47.980594 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-5cdb7cf54d-h7wfl" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.550920 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-7nr8t"] Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.553228 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-7nr8t" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.554128 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-z9drr"] Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.554932 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-z9drr" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.555214 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.557529 4632 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.557534 4632 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.558094 4632 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-jxrpw" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.562585 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-z9drr"] Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.622581 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-rld7v"] Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.623684 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-rld7v" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.625142 4632 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.625545 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.625623 4632 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.625777 4632 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-66fct" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.636492 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-knsfp"] Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.637435 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-knsfp" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.638595 4632 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.646806 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-knsfp"] Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.652094 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/1f8d778c-51f1-4fed-8f3a-34bd3f603d06-metallb-excludel2\") pod \"speaker-rld7v\" (UID: \"1f8d778c-51f1-4fed-8f3a-34bd3f603d06\") " pod="metallb-system/speaker-rld7v" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.652131 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1da19bfe-a759-44f5-9839-b638c45f84b8-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-z9drr\" (UID: \"1da19bfe-a759-44f5-9839-b638c45f84b8\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-z9drr" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.652153 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/9ef9114d-cef9-41d1-ae4f-be2037eebd1e-metrics\") pod \"frr-k8s-7nr8t\" (UID: \"9ef9114d-cef9-41d1-ae4f-be2037eebd1e\") " pod="metallb-system/frr-k8s-7nr8t" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.652184 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxqwf\" (UniqueName: \"kubernetes.io/projected/1f8d778c-51f1-4fed-8f3a-34bd3f603d06-kube-api-access-cxqwf\") pod \"speaker-rld7v\" (UID: \"1f8d778c-51f1-4fed-8f3a-34bd3f603d06\") " pod="metallb-system/speaker-rld7v" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.652205 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/9ef9114d-cef9-41d1-ae4f-be2037eebd1e-frr-startup\") pod \"frr-k8s-7nr8t\" (UID: \"9ef9114d-cef9-41d1-ae4f-be2037eebd1e\") " pod="metallb-system/frr-k8s-7nr8t" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.652219 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/9ef9114d-cef9-41d1-ae4f-be2037eebd1e-reloader\") pod \"frr-k8s-7nr8t\" (UID: \"9ef9114d-cef9-41d1-ae4f-be2037eebd1e\") " pod="metallb-system/frr-k8s-7nr8t" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.652234 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9ef9114d-cef9-41d1-ae4f-be2037eebd1e-metrics-certs\") pod \"frr-k8s-7nr8t\" (UID: \"9ef9114d-cef9-41d1-ae4f-be2037eebd1e\") " pod="metallb-system/frr-k8s-7nr8t" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.652283 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/9ef9114d-cef9-41d1-ae4f-be2037eebd1e-frr-conf\") pod \"frr-k8s-7nr8t\" (UID: \"9ef9114d-cef9-41d1-ae4f-be2037eebd1e\") " pod="metallb-system/frr-k8s-7nr8t" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.652309 4632 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wv72\" (UniqueName: \"kubernetes.io/projected/1da19bfe-a759-44f5-9839-b638c45f84b8-kube-api-access-6wv72\") pod \"frr-k8s-webhook-server-7fcb986d4-z9drr\" (UID: \"1da19bfe-a759-44f5-9839-b638c45f84b8\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-z9drr" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.652329 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/1f8d778c-51f1-4fed-8f3a-34bd3f603d06-memberlist\") pod \"speaker-rld7v\" (UID: \"1f8d778c-51f1-4fed-8f3a-34bd3f603d06\") " pod="metallb-system/speaker-rld7v" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.652346 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/9ef9114d-cef9-41d1-ae4f-be2037eebd1e-frr-sockets\") pod \"frr-k8s-7nr8t\" (UID: \"9ef9114d-cef9-41d1-ae4f-be2037eebd1e\") " pod="metallb-system/frr-k8s-7nr8t" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.652380 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxp5m\" (UniqueName: \"kubernetes.io/projected/9ef9114d-cef9-41d1-ae4f-be2037eebd1e-kube-api-access-rxp5m\") pod \"frr-k8s-7nr8t\" (UID: \"9ef9114d-cef9-41d1-ae4f-be2037eebd1e\") " pod="metallb-system/frr-k8s-7nr8t" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.652410 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1f8d778c-51f1-4fed-8f3a-34bd3f603d06-metrics-certs\") pod \"speaker-rld7v\" (UID: \"1f8d778c-51f1-4fed-8f3a-34bd3f603d06\") " pod="metallb-system/speaker-rld7v" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.753394 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/9ef9114d-cef9-41d1-ae4f-be2037eebd1e-frr-conf\") pod \"frr-k8s-7nr8t\" (UID: \"9ef9114d-cef9-41d1-ae4f-be2037eebd1e\") " pod="metallb-system/frr-k8s-7nr8t" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.753437 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wv72\" (UniqueName: \"kubernetes.io/projected/1da19bfe-a759-44f5-9839-b638c45f84b8-kube-api-access-6wv72\") pod \"frr-k8s-webhook-server-7fcb986d4-z9drr\" (UID: \"1da19bfe-a759-44f5-9839-b638c45f84b8\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-z9drr" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.753461 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/1f8d778c-51f1-4fed-8f3a-34bd3f603d06-memberlist\") pod \"speaker-rld7v\" (UID: \"1f8d778c-51f1-4fed-8f3a-34bd3f603d06\") " pod="metallb-system/speaker-rld7v" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.753482 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/9ef9114d-cef9-41d1-ae4f-be2037eebd1e-frr-sockets\") pod \"frr-k8s-7nr8t\" (UID: \"9ef9114d-cef9-41d1-ae4f-be2037eebd1e\") " pod="metallb-system/frr-k8s-7nr8t" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.753508 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-rxp5m\" (UniqueName: \"kubernetes.io/projected/9ef9114d-cef9-41d1-ae4f-be2037eebd1e-kube-api-access-rxp5m\") pod \"frr-k8s-7nr8t\" (UID: \"9ef9114d-cef9-41d1-ae4f-be2037eebd1e\") " pod="metallb-system/frr-k8s-7nr8t" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.753542 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e2dcb1c6-c676-46e9-85b4-c0a3deb9e2fe-metrics-certs\") pod \"controller-f8648f98b-knsfp\" (UID: \"e2dcb1c6-c676-46e9-85b4-c0a3deb9e2fe\") " pod="metallb-system/controller-f8648f98b-knsfp" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.753582 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e2dcb1c6-c676-46e9-85b4-c0a3deb9e2fe-cert\") pod \"controller-f8648f98b-knsfp\" (UID: \"e2dcb1c6-c676-46e9-85b4-c0a3deb9e2fe\") " pod="metallb-system/controller-f8648f98b-knsfp" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.753597 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1f8d778c-51f1-4fed-8f3a-34bd3f603d06-metrics-certs\") pod \"speaker-rld7v\" (UID: \"1f8d778c-51f1-4fed-8f3a-34bd3f603d06\") " pod="metallb-system/speaker-rld7v" Dec 01 06:53:48 crc kubenswrapper[4632]: E1201 06:53:48.753614 4632 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 01 06:53:48 crc kubenswrapper[4632]: E1201 06:53:48.753673 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1f8d778c-51f1-4fed-8f3a-34bd3f603d06-memberlist podName:1f8d778c-51f1-4fed-8f3a-34bd3f603d06 nodeName:}" failed. No retries permitted until 2025-12-01 06:53:49.253656316 +0000 UTC m=+638.818669288 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/1f8d778c-51f1-4fed-8f3a-34bd3f603d06-memberlist") pod "speaker-rld7v" (UID: "1f8d778c-51f1-4fed-8f3a-34bd3f603d06") : secret "metallb-memberlist" not found Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.753624 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/1f8d778c-51f1-4fed-8f3a-34bd3f603d06-metallb-excludel2\") pod \"speaker-rld7v\" (UID: \"1f8d778c-51f1-4fed-8f3a-34bd3f603d06\") " pod="metallb-system/speaker-rld7v" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.753839 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1da19bfe-a759-44f5-9839-b638c45f84b8-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-z9drr\" (UID: \"1da19bfe-a759-44f5-9839-b638c45f84b8\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-z9drr" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.753857 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/9ef9114d-cef9-41d1-ae4f-be2037eebd1e-frr-conf\") pod \"frr-k8s-7nr8t\" (UID: \"9ef9114d-cef9-41d1-ae4f-be2037eebd1e\") " pod="metallb-system/frr-k8s-7nr8t" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.753872 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/9ef9114d-cef9-41d1-ae4f-be2037eebd1e-metrics\") pod \"frr-k8s-7nr8t\" (UID: \"9ef9114d-cef9-41d1-ae4f-be2037eebd1e\") " pod="metallb-system/frr-k8s-7nr8t" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.753904 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zrs2\" (UniqueName: \"kubernetes.io/projected/e2dcb1c6-c676-46e9-85b4-c0a3deb9e2fe-kube-api-access-5zrs2\") pod \"controller-f8648f98b-knsfp\" (UID: \"e2dcb1c6-c676-46e9-85b4-c0a3deb9e2fe\") " pod="metallb-system/controller-f8648f98b-knsfp" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.753962 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxqwf\" (UniqueName: \"kubernetes.io/projected/1f8d778c-51f1-4fed-8f3a-34bd3f603d06-kube-api-access-cxqwf\") pod \"speaker-rld7v\" (UID: \"1f8d778c-51f1-4fed-8f3a-34bd3f603d06\") " pod="metallb-system/speaker-rld7v" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.753988 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/9ef9114d-cef9-41d1-ae4f-be2037eebd1e-frr-startup\") pod \"frr-k8s-7nr8t\" (UID: \"9ef9114d-cef9-41d1-ae4f-be2037eebd1e\") " pod="metallb-system/frr-k8s-7nr8t" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.754007 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/9ef9114d-cef9-41d1-ae4f-be2037eebd1e-reloader\") pod \"frr-k8s-7nr8t\" (UID: \"9ef9114d-cef9-41d1-ae4f-be2037eebd1e\") " pod="metallb-system/frr-k8s-7nr8t" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.754030 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9ef9114d-cef9-41d1-ae4f-be2037eebd1e-metrics-certs\") pod \"frr-k8s-7nr8t\" (UID: \"9ef9114d-cef9-41d1-ae4f-be2037eebd1e\") " 
pod="metallb-system/frr-k8s-7nr8t" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.754078 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/9ef9114d-cef9-41d1-ae4f-be2037eebd1e-frr-sockets\") pod \"frr-k8s-7nr8t\" (UID: \"9ef9114d-cef9-41d1-ae4f-be2037eebd1e\") " pod="metallb-system/frr-k8s-7nr8t" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.754329 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/1f8d778c-51f1-4fed-8f3a-34bd3f603d06-metallb-excludel2\") pod \"speaker-rld7v\" (UID: \"1f8d778c-51f1-4fed-8f3a-34bd3f603d06\") " pod="metallb-system/speaker-rld7v" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.754665 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/9ef9114d-cef9-41d1-ae4f-be2037eebd1e-metrics\") pod \"frr-k8s-7nr8t\" (UID: \"9ef9114d-cef9-41d1-ae4f-be2037eebd1e\") " pod="metallb-system/frr-k8s-7nr8t" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.754701 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/9ef9114d-cef9-41d1-ae4f-be2037eebd1e-reloader\") pod \"frr-k8s-7nr8t\" (UID: \"9ef9114d-cef9-41d1-ae4f-be2037eebd1e\") " pod="metallb-system/frr-k8s-7nr8t" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.755650 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/9ef9114d-cef9-41d1-ae4f-be2037eebd1e-frr-startup\") pod \"frr-k8s-7nr8t\" (UID: \"9ef9114d-cef9-41d1-ae4f-be2037eebd1e\") " pod="metallb-system/frr-k8s-7nr8t" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.760693 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9ef9114d-cef9-41d1-ae4f-be2037eebd1e-metrics-certs\") pod \"frr-k8s-7nr8t\" (UID: \"9ef9114d-cef9-41d1-ae4f-be2037eebd1e\") " pod="metallb-system/frr-k8s-7nr8t" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.760807 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1da19bfe-a759-44f5-9839-b638c45f84b8-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-z9drr\" (UID: \"1da19bfe-a759-44f5-9839-b638c45f84b8\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-z9drr" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.762796 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1f8d778c-51f1-4fed-8f3a-34bd3f603d06-metrics-certs\") pod \"speaker-rld7v\" (UID: \"1f8d778c-51f1-4fed-8f3a-34bd3f603d06\") " pod="metallb-system/speaker-rld7v" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.768297 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wv72\" (UniqueName: \"kubernetes.io/projected/1da19bfe-a759-44f5-9839-b638c45f84b8-kube-api-access-6wv72\") pod \"frr-k8s-webhook-server-7fcb986d4-z9drr\" (UID: \"1da19bfe-a759-44f5-9839-b638c45f84b8\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-z9drr" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.768809 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxqwf\" (UniqueName: \"kubernetes.io/projected/1f8d778c-51f1-4fed-8f3a-34bd3f603d06-kube-api-access-cxqwf\") pod 
\"speaker-rld7v\" (UID: \"1f8d778c-51f1-4fed-8f3a-34bd3f603d06\") " pod="metallb-system/speaker-rld7v" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.769139 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxp5m\" (UniqueName: \"kubernetes.io/projected/9ef9114d-cef9-41d1-ae4f-be2037eebd1e-kube-api-access-rxp5m\") pod \"frr-k8s-7nr8t\" (UID: \"9ef9114d-cef9-41d1-ae4f-be2037eebd1e\") " pod="metallb-system/frr-k8s-7nr8t" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.855444 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zrs2\" (UniqueName: \"kubernetes.io/projected/e2dcb1c6-c676-46e9-85b4-c0a3deb9e2fe-kube-api-access-5zrs2\") pod \"controller-f8648f98b-knsfp\" (UID: \"e2dcb1c6-c676-46e9-85b4-c0a3deb9e2fe\") " pod="metallb-system/controller-f8648f98b-knsfp" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.855578 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e2dcb1c6-c676-46e9-85b4-c0a3deb9e2fe-metrics-certs\") pod \"controller-f8648f98b-knsfp\" (UID: \"e2dcb1c6-c676-46e9-85b4-c0a3deb9e2fe\") " pod="metallb-system/controller-f8648f98b-knsfp" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.855627 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e2dcb1c6-c676-46e9-85b4-c0a3deb9e2fe-cert\") pod \"controller-f8648f98b-knsfp\" (UID: \"e2dcb1c6-c676-46e9-85b4-c0a3deb9e2fe\") " pod="metallb-system/controller-f8648f98b-knsfp" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.857787 4632 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.858777 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e2dcb1c6-c676-46e9-85b4-c0a3deb9e2fe-metrics-certs\") pod \"controller-f8648f98b-knsfp\" (UID: \"e2dcb1c6-c676-46e9-85b4-c0a3deb9e2fe\") " pod="metallb-system/controller-f8648f98b-knsfp" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.867011 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-7nr8t" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.868387 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e2dcb1c6-c676-46e9-85b4-c0a3deb9e2fe-cert\") pod \"controller-f8648f98b-knsfp\" (UID: \"e2dcb1c6-c676-46e9-85b4-c0a3deb9e2fe\") " pod="metallb-system/controller-f8648f98b-knsfp" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.868841 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zrs2\" (UniqueName: \"kubernetes.io/projected/e2dcb1c6-c676-46e9-85b4-c0a3deb9e2fe-kube-api-access-5zrs2\") pod \"controller-f8648f98b-knsfp\" (UID: \"e2dcb1c6-c676-46e9-85b4-c0a3deb9e2fe\") " pod="metallb-system/controller-f8648f98b-knsfp" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.873597 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-z9drr" Dec 01 06:53:48 crc kubenswrapper[4632]: I1201 06:53:48.947706 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-knsfp" Dec 01 06:53:49 crc kubenswrapper[4632]: I1201 06:53:49.226089 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-z9drr"] Dec 01 06:53:49 crc kubenswrapper[4632]: W1201 06:53:49.230781 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1da19bfe_a759_44f5_9839_b638c45f84b8.slice/crio-959c44ff5004678ad5bc3470a88523e979eb99894b8ea624573ad507b582912d WatchSource:0}: Error finding container 959c44ff5004678ad5bc3470a88523e979eb99894b8ea624573ad507b582912d: Status 404 returned error can't find the container with id 959c44ff5004678ad5bc3470a88523e979eb99894b8ea624573ad507b582912d Dec 01 06:53:49 crc kubenswrapper[4632]: I1201 06:53:49.264142 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/1f8d778c-51f1-4fed-8f3a-34bd3f603d06-memberlist\") pod \"speaker-rld7v\" (UID: \"1f8d778c-51f1-4fed-8f3a-34bd3f603d06\") " pod="metallb-system/speaker-rld7v" Dec 01 06:53:49 crc kubenswrapper[4632]: E1201 06:53:49.264322 4632 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 01 06:53:49 crc kubenswrapper[4632]: E1201 06:53:49.264404 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1f8d778c-51f1-4fed-8f3a-34bd3f603d06-memberlist podName:1f8d778c-51f1-4fed-8f3a-34bd3f603d06 nodeName:}" failed. No retries permitted until 2025-12-01 06:53:50.264389629 +0000 UTC m=+639.829402602 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/1f8d778c-51f1-4fed-8f3a-34bd3f603d06-memberlist") pod "speaker-rld7v" (UID: "1f8d778c-51f1-4fed-8f3a-34bd3f603d06") : secret "metallb-memberlist" not found Dec 01 06:53:49 crc kubenswrapper[4632]: I1201 06:53:49.304253 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-knsfp"] Dec 01 06:53:49 crc kubenswrapper[4632]: W1201 06:53:49.308217 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode2dcb1c6_c676_46e9_85b4_c0a3deb9e2fe.slice/crio-70f953268cc826f2aec73f8b4d7ccf38fa78364262251a885e8d506a9eae191f WatchSource:0}: Error finding container 70f953268cc826f2aec73f8b4d7ccf38fa78364262251a885e8d506a9eae191f: Status 404 returned error can't find the container with id 70f953268cc826f2aec73f8b4d7ccf38fa78364262251a885e8d506a9eae191f Dec 01 06:53:49 crc kubenswrapper[4632]: I1201 06:53:49.461509 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-z9drr" event={"ID":"1da19bfe-a759-44f5-9839-b638c45f84b8","Type":"ContainerStarted","Data":"959c44ff5004678ad5bc3470a88523e979eb99894b8ea624573ad507b582912d"} Dec 01 06:53:49 crc kubenswrapper[4632]: I1201 06:53:49.462484 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7nr8t" event={"ID":"9ef9114d-cef9-41d1-ae4f-be2037eebd1e","Type":"ContainerStarted","Data":"0501cda8bdf038c34f1851205a0b4f3df046d8e4775058c42dabfbdd573c0d15"} Dec 01 06:53:49 crc kubenswrapper[4632]: I1201 06:53:49.464018 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-knsfp" 
event={"ID":"e2dcb1c6-c676-46e9-85b4-c0a3deb9e2fe","Type":"ContainerStarted","Data":"505b168672c05c5a16bf2513fa39c437e5259dbbd0a01e4c6bbf8ea9741a8e05"} Dec 01 06:53:49 crc kubenswrapper[4632]: I1201 06:53:49.464049 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-knsfp" event={"ID":"e2dcb1c6-c676-46e9-85b4-c0a3deb9e2fe","Type":"ContainerStarted","Data":"70f953268cc826f2aec73f8b4d7ccf38fa78364262251a885e8d506a9eae191f"} Dec 01 06:53:50 crc kubenswrapper[4632]: I1201 06:53:50.282904 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/1f8d778c-51f1-4fed-8f3a-34bd3f603d06-memberlist\") pod \"speaker-rld7v\" (UID: \"1f8d778c-51f1-4fed-8f3a-34bd3f603d06\") " pod="metallb-system/speaker-rld7v" Dec 01 06:53:50 crc kubenswrapper[4632]: I1201 06:53:50.289571 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/1f8d778c-51f1-4fed-8f3a-34bd3f603d06-memberlist\") pod \"speaker-rld7v\" (UID: \"1f8d778c-51f1-4fed-8f3a-34bd3f603d06\") " pod="metallb-system/speaker-rld7v" Dec 01 06:53:50 crc kubenswrapper[4632]: I1201 06:53:50.435425 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-rld7v" Dec 01 06:53:50 crc kubenswrapper[4632]: W1201 06:53:50.461788 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f8d778c_51f1_4fed_8f3a_34bd3f603d06.slice/crio-ed72e8c16b7c911a647dcac75f90f084d6f3a9631af3f150de0f86bca3f132d3 WatchSource:0}: Error finding container ed72e8c16b7c911a647dcac75f90f084d6f3a9631af3f150de0f86bca3f132d3: Status 404 returned error can't find the container with id ed72e8c16b7c911a647dcac75f90f084d6f3a9631af3f150de0f86bca3f132d3 Dec 01 06:53:50 crc kubenswrapper[4632]: I1201 06:53:50.469459 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-rld7v" event={"ID":"1f8d778c-51f1-4fed-8f3a-34bd3f603d06","Type":"ContainerStarted","Data":"ed72e8c16b7c911a647dcac75f90f084d6f3a9631af3f150de0f86bca3f132d3"} Dec 01 06:53:50 crc kubenswrapper[4632]: I1201 06:53:50.471313 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-knsfp" event={"ID":"e2dcb1c6-c676-46e9-85b4-c0a3deb9e2fe","Type":"ContainerStarted","Data":"883acaea7ff3f87c7d402581099dc59105a9018a2fa1c3cd9d4eeb1507558083"} Dec 01 06:53:50 crc kubenswrapper[4632]: I1201 06:53:50.472302 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-knsfp" Dec 01 06:53:50 crc kubenswrapper[4632]: I1201 06:53:50.781490 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-knsfp" podStartSLOduration=2.781473172 podStartE2EDuration="2.781473172s" podCreationTimestamp="2025-12-01 06:53:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:50.492089358 +0000 UTC m=+640.057102351" watchObservedRunningTime="2025-12-01 06:53:50.781473172 +0000 UTC m=+640.346486145" Dec 01 06:53:51 crc kubenswrapper[4632]: I1201 06:53:51.479330 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-rld7v" 
event={"ID":"1f8d778c-51f1-4fed-8f3a-34bd3f603d06","Type":"ContainerStarted","Data":"16dd5c4bc7fbddb36cbc9d06f31ddeb473af21a7264ac28240224be5b603bd6f"} Dec 01 06:53:51 crc kubenswrapper[4632]: I1201 06:53:51.479945 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-rld7v" event={"ID":"1f8d778c-51f1-4fed-8f3a-34bd3f603d06","Type":"ContainerStarted","Data":"2f5eee22349a3fd9532778325b28dece1e6d726be33cbbae67564f670f996bf6"} Dec 01 06:53:51 crc kubenswrapper[4632]: I1201 06:53:51.496240 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-rld7v" podStartSLOduration=3.496214909 podStartE2EDuration="3.496214909s" podCreationTimestamp="2025-12-01 06:53:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:53:51.495531019 +0000 UTC m=+641.060543992" watchObservedRunningTime="2025-12-01 06:53:51.496214909 +0000 UTC m=+641.061227882" Dec 01 06:53:52 crc kubenswrapper[4632]: I1201 06:53:52.485898 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-rld7v" Dec 01 06:53:55 crc kubenswrapper[4632]: I1201 06:53:55.506185 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-z9drr" event={"ID":"1da19bfe-a759-44f5-9839-b638c45f84b8","Type":"ContainerStarted","Data":"6d1f1cbcd1299b67e2fb7132ac2cc6b9fcfd7827b17f83f5a383087d092d632c"} Dec 01 06:53:55 crc kubenswrapper[4632]: I1201 06:53:55.506275 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-z9drr" Dec 01 06:53:55 crc kubenswrapper[4632]: I1201 06:53:55.508800 4632 generic.go:334] "Generic (PLEG): container finished" podID="9ef9114d-cef9-41d1-ae4f-be2037eebd1e" containerID="1a6eac2b2af6c3840bd1fd78fcfd1f13623e774ccf66e88858aeca7a1e73b8eb" exitCode=0 Dec 01 06:53:55 crc kubenswrapper[4632]: I1201 06:53:55.508856 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7nr8t" event={"ID":"9ef9114d-cef9-41d1-ae4f-be2037eebd1e","Type":"ContainerDied","Data":"1a6eac2b2af6c3840bd1fd78fcfd1f13623e774ccf66e88858aeca7a1e73b8eb"} Dec 01 06:53:55 crc kubenswrapper[4632]: I1201 06:53:55.521014 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-z9drr" podStartSLOduration=1.486685914 podStartE2EDuration="7.520991321s" podCreationTimestamp="2025-12-01 06:53:48 +0000 UTC" firstStartedPulling="2025-12-01 06:53:49.233593465 +0000 UTC m=+638.798606437" lastFinishedPulling="2025-12-01 06:53:55.267898871 +0000 UTC m=+644.832911844" observedRunningTime="2025-12-01 06:53:55.518431511 +0000 UTC m=+645.083444483" watchObservedRunningTime="2025-12-01 06:53:55.520991321 +0000 UTC m=+645.086004295" Dec 01 06:53:56 crc kubenswrapper[4632]: I1201 06:53:56.516375 4632 generic.go:334] "Generic (PLEG): container finished" podID="9ef9114d-cef9-41d1-ae4f-be2037eebd1e" containerID="92302bfbdfb200f3bc9f72ca2dfd2a9d0a11129448985f7fbd6efaac9e852dbf" exitCode=0 Dec 01 06:53:56 crc kubenswrapper[4632]: I1201 06:53:56.516468 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7nr8t" event={"ID":"9ef9114d-cef9-41d1-ae4f-be2037eebd1e","Type":"ContainerDied","Data":"92302bfbdfb200f3bc9f72ca2dfd2a9d0a11129448985f7fbd6efaac9e852dbf"} Dec 01 06:53:57 crc kubenswrapper[4632]: I1201 06:53:57.526643 4632 generic.go:334] "Generic 
Dec 01 06:53:57 crc kubenswrapper[4632]: I1201 06:53:57.526710 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7nr8t" event={"ID":"9ef9114d-cef9-41d1-ae4f-be2037eebd1e","Type":"ContainerDied","Data":"e7f6590e634da7b9373f45b92c6dca7374d39dc3c0ddf6ce363a4d002318c135"}
Dec 01 06:53:58 crc kubenswrapper[4632]: I1201 06:53:58.542298 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7nr8t" event={"ID":"9ef9114d-cef9-41d1-ae4f-be2037eebd1e","Type":"ContainerStarted","Data":"eba20265962369b43c8fc22ea19fa5b89c6e78921e2c777f46c6a8b90ab68aed"}
Dec 01 06:53:58 crc kubenswrapper[4632]: I1201 06:53:58.542378 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7nr8t" event={"ID":"9ef9114d-cef9-41d1-ae4f-be2037eebd1e","Type":"ContainerStarted","Data":"4fa86b2cd129a8fd405feea2d05049954f5acdf86bae8cec683aa8d42927094a"}
Dec 01 06:53:58 crc kubenswrapper[4632]: I1201 06:53:58.542393 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7nr8t" event={"ID":"9ef9114d-cef9-41d1-ae4f-be2037eebd1e","Type":"ContainerStarted","Data":"77047a032dd441a638713d4198d1578acda99e6be2dd7490168e5586793aec3c"}
Dec 01 06:53:58 crc kubenswrapper[4632]: I1201 06:53:58.542403 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7nr8t" event={"ID":"9ef9114d-cef9-41d1-ae4f-be2037eebd1e","Type":"ContainerStarted","Data":"42d24ac758716bbec0a9d6ed0253468751b7c72bef8d5c46f23ca1db588c79e7"}
Dec 01 06:53:58 crc kubenswrapper[4632]: I1201 06:53:58.542412 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7nr8t" event={"ID":"9ef9114d-cef9-41d1-ae4f-be2037eebd1e","Type":"ContainerStarted","Data":"c94223eba2c08e31675ef97ca5e595fa3833f34c5b9518bd1561a78c633b5e1d"}
Dec 01 06:53:58 crc kubenswrapper[4632]: I1201 06:53:58.542421 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-7nr8t" event={"ID":"9ef9114d-cef9-41d1-ae4f-be2037eebd1e","Type":"ContainerStarted","Data":"89540762107cbdeca183cd77c756abdbbd4f3c562312c1e689a78c004c64b1f3"}
Dec 01 06:53:58 crc kubenswrapper[4632]: I1201 06:53:58.542780 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-7nr8t"
Dec 01 06:53:58 crc kubenswrapper[4632]: I1201 06:53:58.567085 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-7nr8t" podStartSLOduration=4.270851511 podStartE2EDuration="10.567065685s" podCreationTimestamp="2025-12-01 06:53:48 +0000 UTC" firstStartedPulling="2025-12-01 06:53:48.976768531 +0000 UTC m=+638.541781504" lastFinishedPulling="2025-12-01 06:53:55.272982705 +0000 UTC m=+644.837995678" observedRunningTime="2025-12-01 06:53:58.56090469 +0000 UTC m=+648.125917664" watchObservedRunningTime="2025-12-01 06:53:58.567065685 +0000 UTC m=+648.132078659"
Dec 01 06:53:58 crc kubenswrapper[4632]: I1201 06:53:58.867404 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-7nr8t"
Dec 01 06:53:58 crc kubenswrapper[4632]: I1201 06:53:58.897251 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-7nr8t"
Dec 01 06:54:00 crc kubenswrapper[4632]: I1201 06:54:00.439802 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-rld7v"
Dec 01 06:54:03 crc kubenswrapper[4632]: I1201 06:54:03.043318 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-jp7zd"]
Dec 01 06:54:03 crc kubenswrapper[4632]: I1201 06:54:03.044576 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-jp7zd"
Dec 01 06:54:03 crc kubenswrapper[4632]: I1201 06:54:03.046099 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt"
Dec 01 06:54:03 crc kubenswrapper[4632]: I1201 06:54:03.046369 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-rcftq"
Dec 01 06:54:03 crc kubenswrapper[4632]: I1201 06:54:03.046750 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt"
Dec 01 06:54:03 crc kubenswrapper[4632]: I1201 06:54:03.055490 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-jp7zd"]
Dec 01 06:54:03 crc kubenswrapper[4632]: I1201 06:54:03.069759 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mv8c2\" (UniqueName: \"kubernetes.io/projected/3944c06a-c94d-44fa-a520-4363ad48a11e-kube-api-access-mv8c2\") pod \"openstack-operator-index-jp7zd\" (UID: \"3944c06a-c94d-44fa-a520-4363ad48a11e\") " pod="openstack-operators/openstack-operator-index-jp7zd"
Dec 01 06:54:03 crc kubenswrapper[4632]: I1201 06:54:03.171516 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mv8c2\" (UniqueName: \"kubernetes.io/projected/3944c06a-c94d-44fa-a520-4363ad48a11e-kube-api-access-mv8c2\") pod \"openstack-operator-index-jp7zd\" (UID: \"3944c06a-c94d-44fa-a520-4363ad48a11e\") " pod="openstack-operators/openstack-operator-index-jp7zd"
Dec 01 06:54:03 crc kubenswrapper[4632]: I1201 06:54:03.188185 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mv8c2\" (UniqueName: \"kubernetes.io/projected/3944c06a-c94d-44fa-a520-4363ad48a11e-kube-api-access-mv8c2\") pod \"openstack-operator-index-jp7zd\" (UID: \"3944c06a-c94d-44fa-a520-4363ad48a11e\") " pod="openstack-operators/openstack-operator-index-jp7zd"
Dec 01 06:54:03 crc kubenswrapper[4632]: I1201 06:54:03.365596 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-jp7zd"
Dec 01 06:54:03 crc kubenswrapper[4632]: I1201 06:54:03.744464 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-jp7zd"]
Dec 01 06:54:03 crc kubenswrapper[4632]: W1201 06:54:03.748982 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3944c06a_c94d_44fa_a520_4363ad48a11e.slice/crio-d0f0b6cb69454e74b766cf9baa68b5f72793dc377c6a36d7b66e68650cad9f39 WatchSource:0}: Error finding container d0f0b6cb69454e74b766cf9baa68b5f72793dc377c6a36d7b66e68650cad9f39: Status 404 returned error can't find the container with id d0f0b6cb69454e74b766cf9baa68b5f72793dc377c6a36d7b66e68650cad9f39
Dec 01 06:54:04 crc kubenswrapper[4632]: I1201 06:54:04.576036 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jp7zd" event={"ID":"3944c06a-c94d-44fa-a520-4363ad48a11e","Type":"ContainerStarted","Data":"d0f0b6cb69454e74b766cf9baa68b5f72793dc377c6a36d7b66e68650cad9f39"}
Dec 01 06:54:05 crc kubenswrapper[4632]: I1201 06:54:05.583207 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jp7zd" event={"ID":"3944c06a-c94d-44fa-a520-4363ad48a11e","Type":"ContainerStarted","Data":"d0a35ed68e704620288528b20968805f79bb6e5b18f205a3ccacc09023241c77"}
Dec 01 06:54:05 crc kubenswrapper[4632]: I1201 06:54:05.598813 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-jp7zd" podStartSLOduration=1.304519285 podStartE2EDuration="2.59879171s" podCreationTimestamp="2025-12-01 06:54:03 +0000 UTC" firstStartedPulling="2025-12-01 06:54:03.7513143 +0000 UTC m=+653.316327273" lastFinishedPulling="2025-12-01 06:54:05.045586725 +0000 UTC m=+654.610599698" observedRunningTime="2025-12-01 06:54:05.597061664 +0000 UTC m=+655.162074638" watchObservedRunningTime="2025-12-01 06:54:05.59879171 +0000 UTC m=+655.163804682"
Dec 01 06:54:06 crc kubenswrapper[4632]: I1201 06:54:06.393698 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-jp7zd"]
Dec 01 06:54:06 crc kubenswrapper[4632]: I1201 06:54:06.996699 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-x4g48"]
Dec 01 06:54:06 crc kubenswrapper[4632]: I1201 06:54:06.997332 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-x4g48"
Dec 01 06:54:07 crc kubenswrapper[4632]: I1201 06:54:07.003692 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-x4g48"]
Dec 01 06:54:07 crc kubenswrapper[4632]: I1201 06:54:07.031173 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zz57w\" (UniqueName: \"kubernetes.io/projected/33f69e4f-eb93-4113-80d0-b50fdc5a83f7-kube-api-access-zz57w\") pod \"openstack-operator-index-x4g48\" (UID: \"33f69e4f-eb93-4113-80d0-b50fdc5a83f7\") " pod="openstack-operators/openstack-operator-index-x4g48"
Dec 01 06:54:07 crc kubenswrapper[4632]: I1201 06:54:07.133714 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zz57w\" (UniqueName: \"kubernetes.io/projected/33f69e4f-eb93-4113-80d0-b50fdc5a83f7-kube-api-access-zz57w\") pod \"openstack-operator-index-x4g48\" (UID: \"33f69e4f-eb93-4113-80d0-b50fdc5a83f7\") " pod="openstack-operators/openstack-operator-index-x4g48"
Dec 01 06:54:07 crc kubenswrapper[4632]: I1201 06:54:07.154118 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zz57w\" (UniqueName: \"kubernetes.io/projected/33f69e4f-eb93-4113-80d0-b50fdc5a83f7-kube-api-access-zz57w\") pod \"openstack-operator-index-x4g48\" (UID: \"33f69e4f-eb93-4113-80d0-b50fdc5a83f7\") " pod="openstack-operators/openstack-operator-index-x4g48"
Dec 01 06:54:07 crc kubenswrapper[4632]: I1201 06:54:07.311787 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-x4g48"
Dec 01 06:54:07 crc kubenswrapper[4632]: I1201 06:54:07.594970 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-jp7zd" podUID="3944c06a-c94d-44fa-a520-4363ad48a11e" containerName="registry-server" containerID="cri-o://d0a35ed68e704620288528b20968805f79bb6e5b18f205a3ccacc09023241c77" gracePeriod=2
Dec 01 06:54:07 crc kubenswrapper[4632]: I1201 06:54:07.669611 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-x4g48"]
Dec 01 06:54:07 crc kubenswrapper[4632]: W1201 06:54:07.678530 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod33f69e4f_eb93_4113_80d0_b50fdc5a83f7.slice/crio-14a0d777f7f659dff2f44c2643b2a7e4105b73d3d86417d2c17c6b61e5860d80 WatchSource:0}: Error finding container 14a0d777f7f659dff2f44c2643b2a7e4105b73d3d86417d2c17c6b61e5860d80: Status 404 returned error can't find the container with id 14a0d777f7f659dff2f44c2643b2a7e4105b73d3d86417d2c17c6b61e5860d80
Dec 01 06:54:07 crc kubenswrapper[4632]: I1201 06:54:07.900020 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-jp7zd"
Dec 01 06:54:07 crc kubenswrapper[4632]: I1201 06:54:07.953295 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mv8c2\" (UniqueName: \"kubernetes.io/projected/3944c06a-c94d-44fa-a520-4363ad48a11e-kube-api-access-mv8c2\") pod \"3944c06a-c94d-44fa-a520-4363ad48a11e\" (UID: \"3944c06a-c94d-44fa-a520-4363ad48a11e\") "
Dec 01 06:54:07 crc kubenswrapper[4632]: I1201 06:54:07.957028 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3944c06a-c94d-44fa-a520-4363ad48a11e-kube-api-access-mv8c2" (OuterVolumeSpecName: "kube-api-access-mv8c2") pod "3944c06a-c94d-44fa-a520-4363ad48a11e" (UID: "3944c06a-c94d-44fa-a520-4363ad48a11e"). InnerVolumeSpecName "kube-api-access-mv8c2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:54:08 crc kubenswrapper[4632]: I1201 06:54:08.055727 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mv8c2\" (UniqueName: \"kubernetes.io/projected/3944c06a-c94d-44fa-a520-4363ad48a11e-kube-api-access-mv8c2\") on node \"crc\" DevicePath \"\""
Dec 01 06:54:08 crc kubenswrapper[4632]: I1201 06:54:08.605341 4632 generic.go:334] "Generic (PLEG): container finished" podID="3944c06a-c94d-44fa-a520-4363ad48a11e" containerID="d0a35ed68e704620288528b20968805f79bb6e5b18f205a3ccacc09023241c77" exitCode=0
Dec 01 06:54:08 crc kubenswrapper[4632]: I1201 06:54:08.605411 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-jp7zd"
Dec 01 06:54:08 crc kubenswrapper[4632]: I1201 06:54:08.605432 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jp7zd" event={"ID":"3944c06a-c94d-44fa-a520-4363ad48a11e","Type":"ContainerDied","Data":"d0a35ed68e704620288528b20968805f79bb6e5b18f205a3ccacc09023241c77"}
Dec 01 06:54:08 crc kubenswrapper[4632]: I1201 06:54:08.605862 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-jp7zd" event={"ID":"3944c06a-c94d-44fa-a520-4363ad48a11e","Type":"ContainerDied","Data":"d0f0b6cb69454e74b766cf9baa68b5f72793dc377c6a36d7b66e68650cad9f39"}
Dec 01 06:54:08 crc kubenswrapper[4632]: I1201 06:54:08.605884 4632 scope.go:117] "RemoveContainer" containerID="d0a35ed68e704620288528b20968805f79bb6e5b18f205a3ccacc09023241c77"
Dec 01 06:54:08 crc kubenswrapper[4632]: I1201 06:54:08.608448 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-x4g48" event={"ID":"33f69e4f-eb93-4113-80d0-b50fdc5a83f7","Type":"ContainerStarted","Data":"191dfeb675c1eadb6df63f91c562ce01420787e890b182af7fa07f3bdd41672e"}
Dec 01 06:54:08 crc kubenswrapper[4632]: I1201 06:54:08.608539 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-x4g48" event={"ID":"33f69e4f-eb93-4113-80d0-b50fdc5a83f7","Type":"ContainerStarted","Data":"14a0d777f7f659dff2f44c2643b2a7e4105b73d3d86417d2c17c6b61e5860d80"}
Dec 01 06:54:08 crc kubenswrapper[4632]: I1201 06:54:08.623182 4632 scope.go:117] "RemoveContainer" containerID="d0a35ed68e704620288528b20968805f79bb6e5b18f205a3ccacc09023241c77"
Dec 01 06:54:08 crc kubenswrapper[4632]: E1201 06:54:08.624018 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0a35ed68e704620288528b20968805f79bb6e5b18f205a3ccacc09023241c77\": container with ID starting with d0a35ed68e704620288528b20968805f79bb6e5b18f205a3ccacc09023241c77 not found: ID does not exist" containerID="d0a35ed68e704620288528b20968805f79bb6e5b18f205a3ccacc09023241c77"
Dec 01 06:54:08 crc kubenswrapper[4632]: I1201 06:54:08.624136 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0a35ed68e704620288528b20968805f79bb6e5b18f205a3ccacc09023241c77"} err="failed to get container status \"d0a35ed68e704620288528b20968805f79bb6e5b18f205a3ccacc09023241c77\": rpc error: code = NotFound desc = could not find container \"d0a35ed68e704620288528b20968805f79bb6e5b18f205a3ccacc09023241c77\": container with ID starting with d0a35ed68e704620288528b20968805f79bb6e5b18f205a3ccacc09023241c77 not found: ID does not exist"
Dec 01 06:54:08 crc kubenswrapper[4632]: I1201 06:54:08.625283 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-x4g48" podStartSLOduration=2.087398136 podStartE2EDuration="2.62526122s" podCreationTimestamp="2025-12-01 06:54:06 +0000 UTC" firstStartedPulling="2025-12-01 06:54:07.682123916 +0000 UTC m=+657.247136889" lastFinishedPulling="2025-12-01 06:54:08.219987 +0000 UTC m=+657.784999973" observedRunningTime="2025-12-01 06:54:08.622403618 +0000 UTC m=+658.187416592" watchObservedRunningTime="2025-12-01 06:54:08.62526122 +0000 UTC m=+658.190274194"
Dec 01 06:54:08 crc kubenswrapper[4632]: I1201 06:54:08.634434 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-jp7zd"]
Dec 01 06:54:08 crc kubenswrapper[4632]: I1201 06:54:08.637760 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-jp7zd"]
Dec 01 06:54:08 crc kubenswrapper[4632]: I1201 06:54:08.757889 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3944c06a-c94d-44fa-a520-4363ad48a11e" path="/var/lib/kubelet/pods/3944c06a-c94d-44fa-a520-4363ad48a11e/volumes"
Dec 01 06:54:08 crc kubenswrapper[4632]: I1201 06:54:08.870654 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-7nr8t"
Dec 01 06:54:08 crc kubenswrapper[4632]: I1201 06:54:08.882101 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-z9drr"
Dec 01 06:54:08 crc kubenswrapper[4632]: I1201 06:54:08.953754 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-knsfp"
Dec 01 06:54:17 crc kubenswrapper[4632]: I1201 06:54:17.312435 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-x4g48"
Dec 01 06:54:17 crc kubenswrapper[4632]: I1201 06:54:17.313196 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-x4g48"
Dec 01 06:54:17 crc kubenswrapper[4632]: I1201 06:54:17.342308 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-x4g48"
Dec 01 06:54:17 crc kubenswrapper[4632]: I1201 06:54:17.699623 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-x4g48"
Dec 01 06:54:19 crc kubenswrapper[4632]: I1201 06:54:19.827989 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn"]
pods=["openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn"] Dec 01 06:54:19 crc kubenswrapper[4632]: E1201 06:54:19.828491 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3944c06a-c94d-44fa-a520-4363ad48a11e" containerName="registry-server" Dec 01 06:54:19 crc kubenswrapper[4632]: I1201 06:54:19.828506 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="3944c06a-c94d-44fa-a520-4363ad48a11e" containerName="registry-server" Dec 01 06:54:19 crc kubenswrapper[4632]: I1201 06:54:19.828605 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="3944c06a-c94d-44fa-a520-4363ad48a11e" containerName="registry-server" Dec 01 06:54:19 crc kubenswrapper[4632]: I1201 06:54:19.829314 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn" Dec 01 06:54:19 crc kubenswrapper[4632]: I1201 06:54:19.831015 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-qx874" Dec 01 06:54:19 crc kubenswrapper[4632]: I1201 06:54:19.836508 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn"] Dec 01 06:54:19 crc kubenswrapper[4632]: I1201 06:54:19.931170 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/92427059-853f-4a13-8994-5cdf75d48aec-bundle\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn\" (UID: \"92427059-853f-4a13-8994-5cdf75d48aec\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn" Dec 01 06:54:19 crc kubenswrapper[4632]: I1201 06:54:19.931236 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/92427059-853f-4a13-8994-5cdf75d48aec-util\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn\" (UID: \"92427059-853f-4a13-8994-5cdf75d48aec\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn" Dec 01 06:54:19 crc kubenswrapper[4632]: I1201 06:54:19.931309 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l56fn\" (UniqueName: \"kubernetes.io/projected/92427059-853f-4a13-8994-5cdf75d48aec-kube-api-access-l56fn\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn\" (UID: \"92427059-853f-4a13-8994-5cdf75d48aec\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn" Dec 01 06:54:20 crc kubenswrapper[4632]: I1201 06:54:20.033244 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/92427059-853f-4a13-8994-5cdf75d48aec-bundle\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn\" (UID: \"92427059-853f-4a13-8994-5cdf75d48aec\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn" Dec 01 06:54:20 crc kubenswrapper[4632]: I1201 06:54:20.033379 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/92427059-853f-4a13-8994-5cdf75d48aec-util\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn\" (UID: 
\"92427059-853f-4a13-8994-5cdf75d48aec\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn" Dec 01 06:54:20 crc kubenswrapper[4632]: I1201 06:54:20.033436 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l56fn\" (UniqueName: \"kubernetes.io/projected/92427059-853f-4a13-8994-5cdf75d48aec-kube-api-access-l56fn\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn\" (UID: \"92427059-853f-4a13-8994-5cdf75d48aec\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn" Dec 01 06:54:20 crc kubenswrapper[4632]: I1201 06:54:20.033711 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/92427059-853f-4a13-8994-5cdf75d48aec-bundle\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn\" (UID: \"92427059-853f-4a13-8994-5cdf75d48aec\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn" Dec 01 06:54:20 crc kubenswrapper[4632]: I1201 06:54:20.033884 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/92427059-853f-4a13-8994-5cdf75d48aec-util\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn\" (UID: \"92427059-853f-4a13-8994-5cdf75d48aec\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn" Dec 01 06:54:20 crc kubenswrapper[4632]: I1201 06:54:20.052794 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l56fn\" (UniqueName: \"kubernetes.io/projected/92427059-853f-4a13-8994-5cdf75d48aec-kube-api-access-l56fn\") pod \"34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn\" (UID: \"92427059-853f-4a13-8994-5cdf75d48aec\") " pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn" Dec 01 06:54:20 crc kubenswrapper[4632]: I1201 06:54:20.143781 4632 util.go:30] "No sandbox for pod can be found. 
Dec 01 06:54:20 crc kubenswrapper[4632]: I1201 06:54:20.504080 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn"]
Dec 01 06:54:20 crc kubenswrapper[4632]: I1201 06:54:20.705846 4632 generic.go:334] "Generic (PLEG): container finished" podID="92427059-853f-4a13-8994-5cdf75d48aec" containerID="7b68580b7134a9559c8aaa11e6cd8543eec110b6911039c45f69160b2141cdc3" exitCode=0
Dec 01 06:54:20 crc kubenswrapper[4632]: I1201 06:54:20.705951 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn" event={"ID":"92427059-853f-4a13-8994-5cdf75d48aec","Type":"ContainerDied","Data":"7b68580b7134a9559c8aaa11e6cd8543eec110b6911039c45f69160b2141cdc3"}
Dec 01 06:54:20 crc kubenswrapper[4632]: I1201 06:54:20.706219 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn" event={"ID":"92427059-853f-4a13-8994-5cdf75d48aec","Type":"ContainerStarted","Data":"3271c0dce443d3968c81a3ae358c89842adc2734060742aa9f7134bc08b509f1"}
Dec 01 06:54:21 crc kubenswrapper[4632]: I1201 06:54:21.717852 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn" event={"ID":"92427059-853f-4a13-8994-5cdf75d48aec","Type":"ContainerStarted","Data":"2fa33283da5ad5204aed66bfd3bba437cff329c495bababdc36ceca2eadbe137"}
Dec 01 06:54:22 crc kubenswrapper[4632]: I1201 06:54:22.726463 4632 generic.go:334] "Generic (PLEG): container finished" podID="92427059-853f-4a13-8994-5cdf75d48aec" containerID="2fa33283da5ad5204aed66bfd3bba437cff329c495bababdc36ceca2eadbe137" exitCode=0
Dec 01 06:54:22 crc kubenswrapper[4632]: I1201 06:54:22.726574 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn" event={"ID":"92427059-853f-4a13-8994-5cdf75d48aec","Type":"ContainerDied","Data":"2fa33283da5ad5204aed66bfd3bba437cff329c495bababdc36ceca2eadbe137"}
Dec 01 06:54:23 crc kubenswrapper[4632]: I1201 06:54:23.733657 4632 generic.go:334] "Generic (PLEG): container finished" podID="92427059-853f-4a13-8994-5cdf75d48aec" containerID="64703abc9ff566abc1ddbc6f97adabd6a75216ed60541e15b223ad2a712718c9" exitCode=0
Dec 01 06:54:23 crc kubenswrapper[4632]: I1201 06:54:23.733710 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn" event={"ID":"92427059-853f-4a13-8994-5cdf75d48aec","Type":"ContainerDied","Data":"64703abc9ff566abc1ddbc6f97adabd6a75216ed60541e15b223ad2a712718c9"}
Dec 01 06:54:24 crc kubenswrapper[4632]: I1201 06:54:24.939953 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn"
Dec 01 06:54:25 crc kubenswrapper[4632]: I1201 06:54:25.001495 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/92427059-853f-4a13-8994-5cdf75d48aec-util\") pod \"92427059-853f-4a13-8994-5cdf75d48aec\" (UID: \"92427059-853f-4a13-8994-5cdf75d48aec\") "
Dec 01 06:54:25 crc kubenswrapper[4632]: I1201 06:54:25.001544 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l56fn\" (UniqueName: \"kubernetes.io/projected/92427059-853f-4a13-8994-5cdf75d48aec-kube-api-access-l56fn\") pod \"92427059-853f-4a13-8994-5cdf75d48aec\" (UID: \"92427059-853f-4a13-8994-5cdf75d48aec\") "
Dec 01 06:54:25 crc kubenswrapper[4632]: I1201 06:54:25.001702 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/92427059-853f-4a13-8994-5cdf75d48aec-bundle\") pod \"92427059-853f-4a13-8994-5cdf75d48aec\" (UID: \"92427059-853f-4a13-8994-5cdf75d48aec\") "
Dec 01 06:54:25 crc kubenswrapper[4632]: I1201 06:54:25.002459 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/92427059-853f-4a13-8994-5cdf75d48aec-bundle" (OuterVolumeSpecName: "bundle") pod "92427059-853f-4a13-8994-5cdf75d48aec" (UID: "92427059-853f-4a13-8994-5cdf75d48aec"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 06:54:25 crc kubenswrapper[4632]: I1201 06:54:25.007075 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92427059-853f-4a13-8994-5cdf75d48aec-kube-api-access-l56fn" (OuterVolumeSpecName: "kube-api-access-l56fn") pod "92427059-853f-4a13-8994-5cdf75d48aec" (UID: "92427059-853f-4a13-8994-5cdf75d48aec"). InnerVolumeSpecName "kube-api-access-l56fn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:54:25 crc kubenswrapper[4632]: I1201 06:54:25.011890 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/92427059-853f-4a13-8994-5cdf75d48aec-util" (OuterVolumeSpecName: "util") pod "92427059-853f-4a13-8994-5cdf75d48aec" (UID: "92427059-853f-4a13-8994-5cdf75d48aec"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:54:25 crc kubenswrapper[4632]: I1201 06:54:25.103816 4632 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/92427059-853f-4a13-8994-5cdf75d48aec-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:25 crc kubenswrapper[4632]: I1201 06:54:25.103851 4632 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/92427059-853f-4a13-8994-5cdf75d48aec-util\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:25 crc kubenswrapper[4632]: I1201 06:54:25.103864 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l56fn\" (UniqueName: \"kubernetes.io/projected/92427059-853f-4a13-8994-5cdf75d48aec-kube-api-access-l56fn\") on node \"crc\" DevicePath \"\"" Dec 01 06:54:25 crc kubenswrapper[4632]: I1201 06:54:25.748666 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn" event={"ID":"92427059-853f-4a13-8994-5cdf75d48aec","Type":"ContainerDied","Data":"3271c0dce443d3968c81a3ae358c89842adc2734060742aa9f7134bc08b509f1"} Dec 01 06:54:25 crc kubenswrapper[4632]: I1201 06:54:25.749044 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3271c0dce443d3968c81a3ae358c89842adc2734060742aa9f7134bc08b509f1" Dec 01 06:54:25 crc kubenswrapper[4632]: I1201 06:54:25.748739 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn" Dec 01 06:54:31 crc kubenswrapper[4632]: I1201 06:54:31.783234 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-p8vpd"] Dec 01 06:54:31 crc kubenswrapper[4632]: E1201 06:54:31.786288 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92427059-853f-4a13-8994-5cdf75d48aec" containerName="util" Dec 01 06:54:31 crc kubenswrapper[4632]: I1201 06:54:31.786312 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="92427059-853f-4a13-8994-5cdf75d48aec" containerName="util" Dec 01 06:54:31 crc kubenswrapper[4632]: E1201 06:54:31.786324 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92427059-853f-4a13-8994-5cdf75d48aec" containerName="extract" Dec 01 06:54:31 crc kubenswrapper[4632]: I1201 06:54:31.786332 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="92427059-853f-4a13-8994-5cdf75d48aec" containerName="extract" Dec 01 06:54:31 crc kubenswrapper[4632]: E1201 06:54:31.786346 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92427059-853f-4a13-8994-5cdf75d48aec" containerName="pull" Dec 01 06:54:31 crc kubenswrapper[4632]: I1201 06:54:31.786375 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="92427059-853f-4a13-8994-5cdf75d48aec" containerName="pull" Dec 01 06:54:31 crc kubenswrapper[4632]: I1201 06:54:31.786516 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="92427059-853f-4a13-8994-5cdf75d48aec" containerName="extract" Dec 01 06:54:31 crc kubenswrapper[4632]: I1201 06:54:31.787030 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-p8vpd" Dec 01 06:54:31 crc kubenswrapper[4632]: I1201 06:54:31.792263 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-jhv6f" Dec 01 06:54:31 crc kubenswrapper[4632]: I1201 06:54:31.815587 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-p8vpd"] Dec 01 06:54:31 crc kubenswrapper[4632]: I1201 06:54:31.900973 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rsxsf\" (UniqueName: \"kubernetes.io/projected/fc9f0281-8d41-469a-b0f1-2b9f20245a43-kube-api-access-rsxsf\") pod \"openstack-operator-controller-operator-6ddddd9d6f-p8vpd\" (UID: \"fc9f0281-8d41-469a-b0f1-2b9f20245a43\") " pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-p8vpd" Dec 01 06:54:32 crc kubenswrapper[4632]: I1201 06:54:32.002603 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rsxsf\" (UniqueName: \"kubernetes.io/projected/fc9f0281-8d41-469a-b0f1-2b9f20245a43-kube-api-access-rsxsf\") pod \"openstack-operator-controller-operator-6ddddd9d6f-p8vpd\" (UID: \"fc9f0281-8d41-469a-b0f1-2b9f20245a43\") " pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-p8vpd" Dec 01 06:54:32 crc kubenswrapper[4632]: I1201 06:54:32.033444 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rsxsf\" (UniqueName: \"kubernetes.io/projected/fc9f0281-8d41-469a-b0f1-2b9f20245a43-kube-api-access-rsxsf\") pod \"openstack-operator-controller-operator-6ddddd9d6f-p8vpd\" (UID: \"fc9f0281-8d41-469a-b0f1-2b9f20245a43\") " pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-p8vpd" Dec 01 06:54:32 crc kubenswrapper[4632]: I1201 06:54:32.104183 4632 util.go:30] "No sandbox for pod can be found. 
Dec 01 06:54:32 crc kubenswrapper[4632]: I1201 06:54:32.500485 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-p8vpd"]
Dec 01 06:54:32 crc kubenswrapper[4632]: I1201 06:54:32.789593 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-p8vpd" event={"ID":"fc9f0281-8d41-469a-b0f1-2b9f20245a43","Type":"ContainerStarted","Data":"ff6c78dd58840859f382b0bf689d52f1def710318798a3397ff5d080aa45668d"}
Dec 01 06:54:36 crc kubenswrapper[4632]: I1201 06:54:36.816180 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-p8vpd" event={"ID":"fc9f0281-8d41-469a-b0f1-2b9f20245a43","Type":"ContainerStarted","Data":"ab76b1d141d93aea7cf83c964b601788af7bc2a4f929fba34d5a6681fc37a820"}
Dec 01 06:54:36 crc kubenswrapper[4632]: I1201 06:54:36.816896 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-p8vpd"
Dec 01 06:54:36 crc kubenswrapper[4632]: I1201 06:54:36.864646 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-p8vpd" podStartSLOduration=1.855298585 podStartE2EDuration="5.864628849s" podCreationTimestamp="2025-12-01 06:54:31 +0000 UTC" firstStartedPulling="2025-12-01 06:54:32.508296414 +0000 UTC m=+682.073309387" lastFinishedPulling="2025-12-01 06:54:36.517626678 +0000 UTC m=+686.082639651" observedRunningTime="2025-12-01 06:54:36.858992423 +0000 UTC m=+686.424005396" watchObservedRunningTime="2025-12-01 06:54:36.864628849 +0000 UTC m=+686.429641821"
Dec 01 06:54:42 crc kubenswrapper[4632]: I1201 06:54:42.109410 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-6ddddd9d6f-p8vpd"
Dec 01 06:54:49 crc kubenswrapper[4632]: I1201 06:54:49.498050 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 06:54:49 crc kubenswrapper[4632]: I1201 06:54:49.498619 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.145826 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-wpwjc"]
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.147237 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-wpwjc"
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.148916 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-k5wfn"
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.149792 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-4lrqg"]
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.150904 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-4lrqg"
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.153024 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-mpcv6"
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.160903 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-wpwjc"]
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.165532 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-4lrqg"]
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.206447 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-fxrdf"]
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.207785 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-fxrdf"
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.213975 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-hjfzm"
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.215369 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-fxrdf"]
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.236433 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-668d9c48b9-k89zc"]
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.237726 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-k89zc"
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.240064 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-nhkkj"
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.248411 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xjrkn"]
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.249573 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xjrkn"
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.251309 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-9jsnc"
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.254250 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7nwm\" (UniqueName: \"kubernetes.io/projected/fb574298-9e57-474c-9f80-faa7be6cded8-kube-api-access-h7nwm\") pod \"cinder-operator-controller-manager-859b6ccc6-4lrqg\" (UID: \"fb574298-9e57-474c-9f80-faa7be6cded8\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-4lrqg"
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.254314 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssrrd\" (UniqueName: \"kubernetes.io/projected/3ad25430-83fc-45b0-83b1-adbe4e729508-kube-api-access-ssrrd\") pod \"barbican-operator-controller-manager-7d9dfd778-wpwjc\" (UID: \"3ad25430-83fc-45b0-83b1-adbe4e729508\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-wpwjc"
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.260056 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-668d9c48b9-k89zc"]
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.271128 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-knsc7"]
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.272482 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-knsc7"
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.274659 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-86stj"
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.276472 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xjrkn"]
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.283733 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-knsc7"]
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.293000 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-bbrm4"]
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.294109 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-bbrm4"
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.297435 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-hwjzz"]
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.298625 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-hwjzz"
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.303911 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert"
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.303955 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-dmbtf"
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.304057 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-x9wjh"
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.316763 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-bbrm4"]
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.321473 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-hwjzz"]
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.354345 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-546d4bdf48-7ct9v"]
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.355431 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-7ct9v"
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.355818 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sz9gx\" (UniqueName: \"kubernetes.io/projected/749f148d-477b-4186-8c5a-ea9f86e4a64b-kube-api-access-sz9gx\") pod \"heat-operator-controller-manager-5f64f6f8bb-xjrkn\" (UID: \"749f148d-477b-4186-8c5a-ea9f86e4a64b\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xjrkn"
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.355870 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45sdg\" (UniqueName: \"kubernetes.io/projected/0bb7b633-65c0-4c4e-9fad-648fd779ff4a-kube-api-access-45sdg\") pod \"glance-operator-controller-manager-668d9c48b9-k89zc\" (UID: \"0bb7b633-65c0-4c4e-9fad-648fd779ff4a\") " pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-k89zc"
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.355898 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7nwm\" (UniqueName: \"kubernetes.io/projected/fb574298-9e57-474c-9f80-faa7be6cded8-kube-api-access-h7nwm\") pod \"cinder-operator-controller-manager-859b6ccc6-4lrqg\" (UID: \"fb574298-9e57-474c-9f80-faa7be6cded8\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-4lrqg"
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.355945 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssrrd\" (UniqueName: \"kubernetes.io/projected/3ad25430-83fc-45b0-83b1-adbe4e729508-kube-api-access-ssrrd\") pod \"barbican-operator-controller-manager-7d9dfd778-wpwjc\" (UID: \"3ad25430-83fc-45b0-83b1-adbe4e729508\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-wpwjc"
Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.356090 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bz497\" (UniqueName: \"kubernetes.io/projected/c5aba6fe-c38f-45ec-8057-a19b2636fe68-kube-api-access-bz497\") pod \"designate-operator-controller-manager-78b4bc895b-fxrdf\" (UID: \"c5aba6fe-c38f-45ec-8057-a19b2636fe68\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-fxrdf"
\"kube-api-access-bz497\" (UniqueName: \"kubernetes.io/projected/c5aba6fe-c38f-45ec-8057-a19b2636fe68-kube-api-access-bz497\") pod \"designate-operator-controller-manager-78b4bc895b-fxrdf\" (UID: \"c5aba6fe-c38f-45ec-8057-a19b2636fe68\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-fxrdf" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.360260 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-lrrxg" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.361233 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-546d4bdf48-7ct9v"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.364244 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-6546668bfd-fcwbl"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.365760 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-fcwbl" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.377128 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-fwbbz" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.377302 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-r9gx9"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.378436 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-r9gx9" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.380226 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wnk9x"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.381409 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wnk9x" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.381821 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-k2wc9" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.387587 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-r9gx9"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.388106 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-4xwtb" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.394402 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6546668bfd-fcwbl"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.398634 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7nwm\" (UniqueName: \"kubernetes.io/projected/fb574298-9e57-474c-9f80-faa7be6cded8-kube-api-access-h7nwm\") pod \"cinder-operator-controller-manager-859b6ccc6-4lrqg\" (UID: \"fb574298-9e57-474c-9f80-faa7be6cded8\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-4lrqg" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.406063 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssrrd\" (UniqueName: \"kubernetes.io/projected/3ad25430-83fc-45b0-83b1-adbe4e729508-kube-api-access-ssrrd\") pod \"barbican-operator-controller-manager-7d9dfd778-wpwjc\" (UID: \"3ad25430-83fc-45b0-83b1-adbe4e729508\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-wpwjc" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.409423 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wnk9x"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.418460 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-pdsxw"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.419508 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-pdsxw" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.424578 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-brqql" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.434280 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-g2j5h"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.435763 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-g2j5h" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.437265 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-9xwmm" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.438807 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-pdsxw"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.457060 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-7v8l7"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.457417 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bz497\" (UniqueName: \"kubernetes.io/projected/c5aba6fe-c38f-45ec-8057-a19b2636fe68-kube-api-access-bz497\") pod \"designate-operator-controller-manager-78b4bc895b-fxrdf\" (UID: \"c5aba6fe-c38f-45ec-8057-a19b2636fe68\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-fxrdf" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.457459 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdlck\" (UniqueName: \"kubernetes.io/projected/f191cde7-969a-4111-86cf-855623533060-kube-api-access-mdlck\") pod \"keystone-operator-controller-manager-546d4bdf48-7ct9v\" (UID: \"f191cde7-969a-4111-86cf-855623533060\") " pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-7ct9v" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.457504 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67b9l\" (UniqueName: \"kubernetes.io/projected/731aedb5-2e95-4d08-9a4e-6c27e64d5ea7-kube-api-access-67b9l\") pod \"infra-operator-controller-manager-57548d458d-bbrm4\" (UID: \"731aedb5-2e95-4d08-9a4e-6c27e64d5ea7\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bbrm4" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.457531 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9lqf\" (UniqueName: \"kubernetes.io/projected/ad78f229-4425-4bc0-9721-fcf6c2a067d7-kube-api-access-d9lqf\") pod \"ironic-operator-controller-manager-6c548fd776-hwjzz\" (UID: \"ad78f229-4425-4bc0-9721-fcf6c2a067d7\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-hwjzz" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.457566 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sz9gx\" (UniqueName: \"kubernetes.io/projected/749f148d-477b-4186-8c5a-ea9f86e4a64b-kube-api-access-sz9gx\") pod \"heat-operator-controller-manager-5f64f6f8bb-xjrkn\" (UID: \"749f148d-477b-4186-8c5a-ea9f86e4a64b\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xjrkn" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.457587 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/731aedb5-2e95-4d08-9a4e-6c27e64d5ea7-cert\") pod \"infra-operator-controller-manager-57548d458d-bbrm4\" (UID: \"731aedb5-2e95-4d08-9a4e-6c27e64d5ea7\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bbrm4" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.457611 
4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45sdg\" (UniqueName: \"kubernetes.io/projected/0bb7b633-65c0-4c4e-9fad-648fd779ff4a-kube-api-access-45sdg\") pod \"glance-operator-controller-manager-668d9c48b9-k89zc\" (UID: \"0bb7b633-65c0-4c4e-9fad-648fd779ff4a\") " pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-k89zc" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.457665 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxqb4\" (UniqueName: \"kubernetes.io/projected/c470b4eb-c3ca-4117-89ec-5812e4cbcec1-kube-api-access-nxqb4\") pod \"horizon-operator-controller-manager-68c6d99b8f-knsc7\" (UID: \"c470b4eb-c3ca-4117-89ec-5812e4cbcec1\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-knsc7" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.478131 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-7v8l7" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.478335 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446x48x8"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.478451 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-wpwjc" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.482143 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446x48x8" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.482741 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-4lrqg" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.482910 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bz497\" (UniqueName: \"kubernetes.io/projected/c5aba6fe-c38f-45ec-8057-a19b2636fe68-kube-api-access-bz497\") pod \"designate-operator-controller-manager-78b4bc895b-fxrdf\" (UID: \"c5aba6fe-c38f-45ec-8057-a19b2636fe68\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-fxrdf" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.483146 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45sdg\" (UniqueName: \"kubernetes.io/projected/0bb7b633-65c0-4c4e-9fad-648fd779ff4a-kube-api-access-45sdg\") pod \"glance-operator-controller-manager-668d9c48b9-k89zc\" (UID: \"0bb7b633-65c0-4c4e-9fad-648fd779ff4a\") " pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-k89zc" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.483504 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sz9gx\" (UniqueName: \"kubernetes.io/projected/749f148d-477b-4186-8c5a-ea9f86e4a64b-kube-api-access-sz9gx\") pod \"heat-operator-controller-manager-5f64f6f8bb-xjrkn\" (UID: \"749f148d-477b-4186-8c5a-ea9f86e4a64b\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xjrkn" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.483708 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-9x8zk" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.487290 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-5xmdd" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.491347 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.508826 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-g2j5h"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.513132 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446x48x8"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.521679 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-x2k78"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.523136 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-x2k78" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.525788 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-7v8l7"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.526435 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-ncp7r" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.535267 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-x2k78"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.539288 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-fxrdf" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.549760 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-674sk"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.551121 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-674sk" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.552176 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-k89zc" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.555388 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-g8wsb" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.558796 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/731aedb5-2e95-4d08-9a4e-6c27e64d5ea7-cert\") pod \"infra-operator-controller-manager-57548d458d-bbrm4\" (UID: \"731aedb5-2e95-4d08-9a4e-6c27e64d5ea7\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bbrm4" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.558843 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-knjd4\" (UniqueName: \"kubernetes.io/projected/0d6924f1-38a5-434e-99b6-9f9a06ae0894-kube-api-access-knjd4\") pod \"octavia-operator-controller-manager-998648c74-g2j5h\" (UID: \"0d6924f1-38a5-434e-99b6-9f9a06ae0894\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-g2j5h" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.558872 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/05397964-6686-490a-ab73-ec535a262794-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446x48x8\" (UID: \"05397964-6686-490a-ab73-ec535a262794\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446x48x8" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.558907 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jl57t\" (UniqueName: \"kubernetes.io/projected/05397964-6686-490a-ab73-ec535a262794-kube-api-access-jl57t\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446x48x8\" (UID: \"05397964-6686-490a-ab73-ec535a262794\") " 
pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446x48x8" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.558934 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kw22t\" (UniqueName: \"kubernetes.io/projected/768788a8-025e-4e79-a0ec-6bb23a14f72e-kube-api-access-kw22t\") pod \"manila-operator-controller-manager-6546668bfd-fcwbl\" (UID: \"768788a8-025e-4e79-a0ec-6bb23a14f72e\") " pod="openstack-operators/manila-operator-controller-manager-6546668bfd-fcwbl" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.558961 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxqb4\" (UniqueName: \"kubernetes.io/projected/c470b4eb-c3ca-4117-89ec-5812e4cbcec1-kube-api-access-nxqb4\") pod \"horizon-operator-controller-manager-68c6d99b8f-knsc7\" (UID: \"c470b4eb-c3ca-4117-89ec-5812e4cbcec1\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-knsc7" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.558981 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7l22\" (UniqueName: \"kubernetes.io/projected/c1fe7e47-1a47-4f9b-b0a3-cbb08a5cce73-kube-api-access-z7l22\") pod \"nova-operator-controller-manager-697bc559fc-pdsxw\" (UID: \"c1fe7e47-1a47-4f9b-b0a3-cbb08a5cce73\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-pdsxw" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.558999 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ptmk\" (UniqueName: \"kubernetes.io/projected/254c38bb-3a55-426d-a497-69b3aa16c639-kube-api-access-5ptmk\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-wnk9x\" (UID: \"254c38bb-3a55-426d-a497-69b3aa16c639\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wnk9x" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.559059 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdlck\" (UniqueName: \"kubernetes.io/projected/f191cde7-969a-4111-86cf-855623533060-kube-api-access-mdlck\") pod \"keystone-operator-controller-manager-546d4bdf48-7ct9v\" (UID: \"f191cde7-969a-4111-86cf-855623533060\") " pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-7ct9v" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.559082 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4cvr9\" (UniqueName: \"kubernetes.io/projected/35336d69-2a15-4513-970c-19e86cbb339f-kube-api-access-4cvr9\") pod \"mariadb-operator-controller-manager-56bbcc9d85-r9gx9\" (UID: \"35336d69-2a15-4513-970c-19e86cbb339f\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-r9gx9" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.559106 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67b9l\" (UniqueName: \"kubernetes.io/projected/731aedb5-2e95-4d08-9a4e-6c27e64d5ea7-kube-api-access-67b9l\") pod \"infra-operator-controller-manager-57548d458d-bbrm4\" (UID: \"731aedb5-2e95-4d08-9a4e-6c27e64d5ea7\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bbrm4" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.559130 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9lqf\" 
(UniqueName: \"kubernetes.io/projected/ad78f229-4425-4bc0-9721-fcf6c2a067d7-kube-api-access-d9lqf\") pod \"ironic-operator-controller-manager-6c548fd776-hwjzz\" (UID: \"ad78f229-4425-4bc0-9721-fcf6c2a067d7\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-hwjzz" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.559154 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxg2g\" (UniqueName: \"kubernetes.io/projected/ecb68de8-b267-4c69-baf4-078e3feacf8e-kube-api-access-vxg2g\") pod \"ovn-operator-controller-manager-b6456fdb6-7v8l7\" (UID: \"ecb68de8-b267-4c69-baf4-078e3feacf8e\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-7v8l7" Dec 01 06:55:00 crc kubenswrapper[4632]: E1201 06:55:00.559273 4632 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 01 06:55:00 crc kubenswrapper[4632]: E1201 06:55:00.559325 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/731aedb5-2e95-4d08-9a4e-6c27e64d5ea7-cert podName:731aedb5-2e95-4d08-9a4e-6c27e64d5ea7 nodeName:}" failed. No retries permitted until 2025-12-01 06:55:01.059308425 +0000 UTC m=+710.624321398 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/731aedb5-2e95-4d08-9a4e-6c27e64d5ea7-cert") pod "infra-operator-controller-manager-57548d458d-bbrm4" (UID: "731aedb5-2e95-4d08-9a4e-6c27e64d5ea7") : secret "infra-operator-webhook-server-cert" not found Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.564764 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xjrkn" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.575825 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdlck\" (UniqueName: \"kubernetes.io/projected/f191cde7-969a-4111-86cf-855623533060-kube-api-access-mdlck\") pod \"keystone-operator-controller-manager-546d4bdf48-7ct9v\" (UID: \"f191cde7-969a-4111-86cf-855623533060\") " pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-7ct9v" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.576977 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxqb4\" (UniqueName: \"kubernetes.io/projected/c470b4eb-c3ca-4117-89ec-5812e4cbcec1-kube-api-access-nxqb4\") pod \"horizon-operator-controller-manager-68c6d99b8f-knsc7\" (UID: \"c470b4eb-c3ca-4117-89ec-5812e4cbcec1\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-knsc7" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.577441 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9lqf\" (UniqueName: \"kubernetes.io/projected/ad78f229-4425-4bc0-9721-fcf6c2a067d7-kube-api-access-d9lqf\") pod \"ironic-operator-controller-manager-6c548fd776-hwjzz\" (UID: \"ad78f229-4425-4bc0-9721-fcf6c2a067d7\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-hwjzz" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.578405 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-674sk"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.581752 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-67b9l\" (UniqueName: \"kubernetes.io/projected/731aedb5-2e95-4d08-9a4e-6c27e64d5ea7-kube-api-access-67b9l\") pod \"infra-operator-controller-manager-57548d458d-bbrm4\" (UID: \"731aedb5-2e95-4d08-9a4e-6c27e64d5ea7\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bbrm4" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.595317 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-knsc7" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.595826 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-x2252"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.597083 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-x2252" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.599594 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-v29f7" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.630611 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-hwjzz" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.646854 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-x2252"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.660323 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l78lv\" (UniqueName: \"kubernetes.io/projected/9744f748-86b6-417c-ab38-18cc3ad9b89a-kube-api-access-l78lv\") pod \"swift-operator-controller-manager-5f8c65bbfc-674sk\" (UID: \"9744f748-86b6-417c-ab38-18cc3ad9b89a\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-674sk" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.660405 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4cvr9\" (UniqueName: \"kubernetes.io/projected/35336d69-2a15-4513-970c-19e86cbb339f-kube-api-access-4cvr9\") pod \"mariadb-operator-controller-manager-56bbcc9d85-r9gx9\" (UID: \"35336d69-2a15-4513-970c-19e86cbb339f\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-r9gx9" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.660439 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-df6tw\" (UniqueName: \"kubernetes.io/projected/a9952ac0-b2d4-4717-823b-5f9f0338fb5f-kube-api-access-df6tw\") pod \"placement-operator-controller-manager-78f8948974-x2k78\" (UID: \"a9952ac0-b2d4-4717-823b-5f9f0338fb5f\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-x2k78" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.660479 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxg2g\" (UniqueName: \"kubernetes.io/projected/ecb68de8-b267-4c69-baf4-078e3feacf8e-kube-api-access-vxg2g\") pod \"ovn-operator-controller-manager-b6456fdb6-7v8l7\" (UID: \"ecb68de8-b267-4c69-baf4-078e3feacf8e\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-7v8l7" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.660524 4632 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-knjd4\" (UniqueName: \"kubernetes.io/projected/0d6924f1-38a5-434e-99b6-9f9a06ae0894-kube-api-access-knjd4\") pod \"octavia-operator-controller-manager-998648c74-g2j5h\" (UID: \"0d6924f1-38a5-434e-99b6-9f9a06ae0894\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-g2j5h" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.660547 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/05397964-6686-490a-ab73-ec535a262794-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446x48x8\" (UID: \"05397964-6686-490a-ab73-ec535a262794\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446x48x8" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.660566 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5gbk\" (UniqueName: \"kubernetes.io/projected/697c8fad-c587-41ce-ae4a-158bb22b6394-kube-api-access-p5gbk\") pod \"telemetry-operator-controller-manager-76cc84c6bb-x2252\" (UID: \"697c8fad-c587-41ce-ae4a-158bb22b6394\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-x2252" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.660599 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jl57t\" (UniqueName: \"kubernetes.io/projected/05397964-6686-490a-ab73-ec535a262794-kube-api-access-jl57t\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446x48x8\" (UID: \"05397964-6686-490a-ab73-ec535a262794\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446x48x8" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.660625 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kw22t\" (UniqueName: \"kubernetes.io/projected/768788a8-025e-4e79-a0ec-6bb23a14f72e-kube-api-access-kw22t\") pod \"manila-operator-controller-manager-6546668bfd-fcwbl\" (UID: \"768788a8-025e-4e79-a0ec-6bb23a14f72e\") " pod="openstack-operators/manila-operator-controller-manager-6546668bfd-fcwbl" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.660651 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7l22\" (UniqueName: \"kubernetes.io/projected/c1fe7e47-1a47-4f9b-b0a3-cbb08a5cce73-kube-api-access-z7l22\") pod \"nova-operator-controller-manager-697bc559fc-pdsxw\" (UID: \"c1fe7e47-1a47-4f9b-b0a3-cbb08a5cce73\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-pdsxw" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.660668 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ptmk\" (UniqueName: \"kubernetes.io/projected/254c38bb-3a55-426d-a497-69b3aa16c639-kube-api-access-5ptmk\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-wnk9x\" (UID: \"254c38bb-3a55-426d-a497-69b3aa16c639\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wnk9x" Dec 01 06:55:00 crc kubenswrapper[4632]: E1201 06:55:00.661136 4632 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 01 06:55:00 crc kubenswrapper[4632]: E1201 06:55:00.661185 4632 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/05397964-6686-490a-ab73-ec535a262794-cert podName:05397964-6686-490a-ab73-ec535a262794 nodeName:}" failed. No retries permitted until 2025-12-01 06:55:01.161169687 +0000 UTC m=+710.726182660 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/05397964-6686-490a-ab73-ec535a262794-cert") pod "openstack-baremetal-operator-controller-manager-6698bcb446x48x8" (UID: "05397964-6686-490a-ab73-ec535a262794") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.671435 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-4hqhb"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.672554 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-4hqhb" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.673566 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-4hqhb"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.677718 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-dk487" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.678196 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-7ct9v" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.680934 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ptmk\" (UniqueName: \"kubernetes.io/projected/254c38bb-3a55-426d-a497-69b3aa16c639-kube-api-access-5ptmk\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-wnk9x\" (UID: \"254c38bb-3a55-426d-a497-69b3aa16c639\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wnk9x" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.682299 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxg2g\" (UniqueName: \"kubernetes.io/projected/ecb68de8-b267-4c69-baf4-078e3feacf8e-kube-api-access-vxg2g\") pod \"ovn-operator-controller-manager-b6456fdb6-7v8l7\" (UID: \"ecb68de8-b267-4c69-baf4-078e3feacf8e\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-7v8l7" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.683447 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jl57t\" (UniqueName: \"kubernetes.io/projected/05397964-6686-490a-ab73-ec535a262794-kube-api-access-jl57t\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446x48x8\" (UID: \"05397964-6686-490a-ab73-ec535a262794\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446x48x8" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.686138 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kw22t\" (UniqueName: \"kubernetes.io/projected/768788a8-025e-4e79-a0ec-6bb23a14f72e-kube-api-access-kw22t\") pod \"manila-operator-controller-manager-6546668bfd-fcwbl\" (UID: \"768788a8-025e-4e79-a0ec-6bb23a14f72e\") " pod="openstack-operators/manila-operator-controller-manager-6546668bfd-fcwbl" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.694698 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-4cvr9\" (UniqueName: \"kubernetes.io/projected/35336d69-2a15-4513-970c-19e86cbb339f-kube-api-access-4cvr9\") pod \"mariadb-operator-controller-manager-56bbcc9d85-r9gx9\" (UID: \"35336d69-2a15-4513-970c-19e86cbb339f\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-r9gx9" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.695382 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-knjd4\" (UniqueName: \"kubernetes.io/projected/0d6924f1-38a5-434e-99b6-9f9a06ae0894-kube-api-access-knjd4\") pod \"octavia-operator-controller-manager-998648c74-g2j5h\" (UID: \"0d6924f1-38a5-434e-99b6-9f9a06ae0894\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-g2j5h" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.697457 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7l22\" (UniqueName: \"kubernetes.io/projected/c1fe7e47-1a47-4f9b-b0a3-cbb08a5cce73-kube-api-access-z7l22\") pod \"nova-operator-controller-manager-697bc559fc-pdsxw\" (UID: \"c1fe7e47-1a47-4f9b-b0a3-cbb08a5cce73\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-pdsxw" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.697596 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-fcwbl" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.761761 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-r9gx9" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.762884 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l78lv\" (UniqueName: \"kubernetes.io/projected/9744f748-86b6-417c-ab38-18cc3ad9b89a-kube-api-access-l78lv\") pod \"swift-operator-controller-manager-5f8c65bbfc-674sk\" (UID: \"9744f748-86b6-417c-ab38-18cc3ad9b89a\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-674sk" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.762933 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-df6tw\" (UniqueName: \"kubernetes.io/projected/a9952ac0-b2d4-4717-823b-5f9f0338fb5f-kube-api-access-df6tw\") pod \"placement-operator-controller-manager-78f8948974-x2k78\" (UID: \"a9952ac0-b2d4-4717-823b-5f9f0338fb5f\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-x2k78" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.762969 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fmqh\" (UniqueName: \"kubernetes.io/projected/5640bad0-ba52-4bc4-845d-d47987318155-kube-api-access-4fmqh\") pod \"test-operator-controller-manager-5854674fcc-4hqhb\" (UID: \"5640bad0-ba52-4bc4-845d-d47987318155\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-4hqhb" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.763025 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5gbk\" (UniqueName: \"kubernetes.io/projected/697c8fad-c587-41ce-ae4a-158bb22b6394-kube-api-access-p5gbk\") pod \"telemetry-operator-controller-manager-76cc84c6bb-x2252\" (UID: \"697c8fad-c587-41ce-ae4a-158bb22b6394\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-x2252" Dec 01 06:55:00 crc kubenswrapper[4632]: 
I1201 06:55:00.783048 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wnk9x" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.783761 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-zkfsl"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.784841 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-zkfsl"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.784985 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-zkfsl" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.786566 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5gbk\" (UniqueName: \"kubernetes.io/projected/697c8fad-c587-41ce-ae4a-158bb22b6394-kube-api-access-p5gbk\") pod \"telemetry-operator-controller-manager-76cc84c6bb-x2252\" (UID: \"697c8fad-c587-41ce-ae4a-158bb22b6394\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-x2252" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.788300 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-x745n" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.795108 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-df6tw\" (UniqueName: \"kubernetes.io/projected/a9952ac0-b2d4-4717-823b-5f9f0338fb5f-kube-api-access-df6tw\") pod \"placement-operator-controller-manager-78f8948974-x2k78\" (UID: \"a9952ac0-b2d4-4717-823b-5f9f0338fb5f\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-x2k78" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.797562 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l78lv\" (UniqueName: \"kubernetes.io/projected/9744f748-86b6-417c-ab38-18cc3ad9b89a-kube-api-access-l78lv\") pod \"swift-operator-controller-manager-5f8c65bbfc-674sk\" (UID: \"9744f748-86b6-417c-ab38-18cc3ad9b89a\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-674sk" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.811877 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-pdsxw" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.851212 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-g2j5h" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.857808 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-7v8l7" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.865101 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fmqh\" (UniqueName: \"kubernetes.io/projected/5640bad0-ba52-4bc4-845d-d47987318155-kube-api-access-4fmqh\") pod \"test-operator-controller-manager-5854674fcc-4hqhb\" (UID: \"5640bad0-ba52-4bc4-845d-d47987318155\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-4hqhb" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.865226 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-524k2\" (UniqueName: \"kubernetes.io/projected/756b1531-b2e5-4a10-aad8-ae2378b09a68-kube-api-access-524k2\") pod \"watcher-operator-controller-manager-769dc69bc-zkfsl\" (UID: \"756b1531-b2e5-4a10-aad8-ae2378b09a68\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-zkfsl" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.868263 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.869708 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.872724 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.873443 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.873642 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-rpdct" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.879741 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.885236 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fmqh\" (UniqueName: \"kubernetes.io/projected/5640bad0-ba52-4bc4-845d-d47987318155-kube-api-access-4fmqh\") pod \"test-operator-controller-manager-5854674fcc-4hqhb\" (UID: \"5640bad0-ba52-4bc4-845d-d47987318155\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-4hqhb" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.890730 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-x2k78" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.899153 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-674sk" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.921322 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-x2252" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.969066 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-dfqrf\" (UID: \"0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.970004 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-dfqrf\" (UID: \"0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.970068 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-524k2\" (UniqueName: \"kubernetes.io/projected/756b1531-b2e5-4a10-aad8-ae2378b09a68-kube-api-access-524k2\") pod \"watcher-operator-controller-manager-769dc69bc-zkfsl\" (UID: \"756b1531-b2e5-4a10-aad8-ae2378b09a68\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-zkfsl" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.970137 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnzpd\" (UniqueName: \"kubernetes.io/projected/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-kube-api-access-gnzpd\") pod \"openstack-operator-controller-manager-656fd97d56-dfqrf\" (UID: \"0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.976828 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7jjwb"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.983507 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7jjwb" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.985306 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-5pwjq" Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.988502 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7jjwb"] Dec 01 06:55:00 crc kubenswrapper[4632]: I1201 06:55:00.990284 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-524k2\" (UniqueName: \"kubernetes.io/projected/756b1531-b2e5-4a10-aad8-ae2378b09a68-kube-api-access-524k2\") pod \"watcher-operator-controller-manager-769dc69bc-zkfsl\" (UID: \"756b1531-b2e5-4a10-aad8-ae2378b09a68\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-zkfsl" Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.027974 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-4hqhb" Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.074431 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/731aedb5-2e95-4d08-9a4e-6c27e64d5ea7-cert\") pod \"infra-operator-controller-manager-57548d458d-bbrm4\" (UID: \"731aedb5-2e95-4d08-9a4e-6c27e64d5ea7\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bbrm4" Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.074650 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-dfqrf\" (UID: \"0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf" Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.074584 4632 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.074795 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/731aedb5-2e95-4d08-9a4e-6c27e64d5ea7-cert podName:731aedb5-2e95-4d08-9a4e-6c27e64d5ea7 nodeName:}" failed. No retries permitted until 2025-12-01 06:55:02.074762583 +0000 UTC m=+711.639775556 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/731aedb5-2e95-4d08-9a4e-6c27e64d5ea7-cert") pod "infra-operator-controller-manager-57548d458d-bbrm4" (UID: "731aedb5-2e95-4d08-9a4e-6c27e64d5ea7") : secret "infra-operator-webhook-server-cert" not found Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.074796 4632 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.074858 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-webhook-certs podName:0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9 nodeName:}" failed. No retries permitted until 2025-12-01 06:55:01.57485012 +0000 UTC m=+711.139863092 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-webhook-certs") pod "openstack-operator-controller-manager-656fd97d56-dfqrf" (UID: "0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9") : secret "webhook-server-cert" not found Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.075162 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnzpd\" (UniqueName: \"kubernetes.io/projected/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-kube-api-access-gnzpd\") pod \"openstack-operator-controller-manager-656fd97d56-dfqrf\" (UID: \"0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf" Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.075248 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7jfm\" (UniqueName: \"kubernetes.io/projected/ce060aca-e2c3-4454-b126-719a572ece48-kube-api-access-x7jfm\") pod \"rabbitmq-cluster-operator-manager-668c99d594-7jjwb\" (UID: \"ce060aca-e2c3-4454-b126-719a572ece48\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7jjwb" Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.075649 4632 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.075699 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-metrics-certs podName:0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9 nodeName:}" failed. No retries permitted until 2025-12-01 06:55:01.575685605 +0000 UTC m=+711.140698578 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-metrics-certs") pod "openstack-operator-controller-manager-656fd97d56-dfqrf" (UID: "0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9") : secret "metrics-server-cert" not found Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.075723 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-dfqrf\" (UID: \"0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf" Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.129334 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-zkfsl" Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.130094 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnzpd\" (UniqueName: \"kubernetes.io/projected/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-kube-api-access-gnzpd\") pod \"openstack-operator-controller-manager-656fd97d56-dfqrf\" (UID: \"0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf" Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.179789 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/05397964-6686-490a-ab73-ec535a262794-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446x48x8\" (UID: \"05397964-6686-490a-ab73-ec535a262794\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446x48x8" Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.179930 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7jfm\" (UniqueName: \"kubernetes.io/projected/ce060aca-e2c3-4454-b126-719a572ece48-kube-api-access-x7jfm\") pod \"rabbitmq-cluster-operator-manager-668c99d594-7jjwb\" (UID: \"ce060aca-e2c3-4454-b126-719a572ece48\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7jjwb" Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.181540 4632 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.181614 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/05397964-6686-490a-ab73-ec535a262794-cert podName:05397964-6686-490a-ab73-ec535a262794 nodeName:}" failed. No retries permitted until 2025-12-01 06:55:02.181594885 +0000 UTC m=+711.746607859 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/05397964-6686-490a-ab73-ec535a262794-cert") pod "openstack-baremetal-operator-controller-manager-6698bcb446x48x8" (UID: "05397964-6686-490a-ab73-ec535a262794") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.232945 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7jfm\" (UniqueName: \"kubernetes.io/projected/ce060aca-e2c3-4454-b126-719a572ece48-kube-api-access-x7jfm\") pod \"rabbitmq-cluster-operator-manager-668c99d594-7jjwb\" (UID: \"ce060aca-e2c3-4454-b126-719a572ece48\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7jjwb" Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.310800 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-4lrqg"] Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.316756 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-wpwjc"] Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.338574 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-fxrdf"] Dec 01 06:55:01 crc kubenswrapper[4632]: W1201 06:55:01.351508 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfb574298_9e57_474c_9f80_faa7be6cded8.slice/crio-e28c62492b11510d09f8668c0a38c169a0eb65c81fb51c42d04a6e560b808209 WatchSource:0}: Error finding container e28c62492b11510d09f8668c0a38c169a0eb65c81fb51c42d04a6e560b808209: Status 404 returned error can't find the container with id e28c62492b11510d09f8668c0a38c169a0eb65c81fb51c42d04a6e560b808209 Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.355653 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7jjwb" Dec 01 06:55:01 crc kubenswrapper[4632]: W1201 06:55:01.358980 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc5aba6fe_c38f_45ec_8057_a19b2636fe68.slice/crio-228b227320eee778df381af66fbed5fde60ad339dceb6be486781333ae562cb0 WatchSource:0}: Error finding container 228b227320eee778df381af66fbed5fde60ad339dceb6be486781333ae562cb0: Status 404 returned error can't find the container with id 228b227320eee778df381af66fbed5fde60ad339dceb6be486781333ae562cb0 Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.362889 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-668d9c48b9-k89zc"] Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.588444 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-dfqrf\" (UID: \"0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf" Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.588635 4632 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.588712 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-webhook-certs podName:0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9 nodeName:}" failed. No retries permitted until 2025-12-01 06:55:02.588693288 +0000 UTC m=+712.153706261 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-webhook-certs") pod "openstack-operator-controller-manager-656fd97d56-dfqrf" (UID: "0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9") : secret "webhook-server-cert" not found Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.588650 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-dfqrf\" (UID: \"0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf" Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.588755 4632 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.588815 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-metrics-certs podName:0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9 nodeName:}" failed. No retries permitted until 2025-12-01 06:55:02.588797054 +0000 UTC m=+712.153810027 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-metrics-certs") pod "openstack-operator-controller-manager-656fd97d56-dfqrf" (UID: "0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9") : secret "metrics-server-cert" not found Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.694273 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-knsc7"] Dec 01 06:55:01 crc kubenswrapper[4632]: W1201 06:55:01.703690 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc470b4eb_c3ca_4117_89ec_5812e4cbcec1.slice/crio-6a0fb17e972083db8b4393cded97519eb0975de0d3db39b639d9532307068316 WatchSource:0}: Error finding container 6a0fb17e972083db8b4393cded97519eb0975de0d3db39b639d9532307068316: Status 404 returned error can't find the container with id 6a0fb17e972083db8b4393cded97519eb0975de0d3db39b639d9532307068316 Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.727166 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6546668bfd-fcwbl"] Dec 01 06:55:01 crc kubenswrapper[4632]: W1201 06:55:01.729288 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod768788a8_025e_4e79_a0ec_6bb23a14f72e.slice/crio-5c66db62e646aca84ee1b948afbe841c8f2b34aa4b08417aaf62a6726c2e3c21 WatchSource:0}: Error finding container 5c66db62e646aca84ee1b948afbe841c8f2b34aa4b08417aaf62a6726c2e3c21: Status 404 returned error can't find the container with id 5c66db62e646aca84ee1b948afbe841c8f2b34aa4b08417aaf62a6726c2e3c21 Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.733914 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-hwjzz"] Dec 01 06:55:01 crc kubenswrapper[4632]: W1201 06:55:01.735797 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podad78f229_4425_4bc0_9721_fcf6c2a067d7.slice/crio-b535a5cef986fac9afa6abe7c524b93de4887e727649a31ea55487058b4eba3f WatchSource:0}: Error finding container b535a5cef986fac9afa6abe7c524b93de4887e727649a31ea55487058b4eba3f: Status 404 returned error can't find the container with id b535a5cef986fac9afa6abe7c524b93de4887e727649a31ea55487058b4eba3f Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.810913 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-546d4bdf48-7ct9v"] Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.819510 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-g2j5h"] Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.831473 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xjrkn"] Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.902868 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-7v8l7"] Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.906838 4632 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-x7jfm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-7jjwb_openstack-operators(ce060aca-e2c3-4454-b126-719a572ece48): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.908445 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7jjwb" podUID="ce060aca-e2c3-4454-b126-719a572ece48" Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.910728 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-r9gx9"] Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.918033 4632 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4fmqh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-4hqhb_openstack-operators(5640bad0-ba52-4bc4-845d-d47987318155): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.920563 4632 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4fmqh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-4hqhb_openstack-operators(5640bad0-ba52-4bc4-845d-d47987318155): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.921814 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS 
exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-4hqhb" podUID="5640bad0-ba52-4bc4-845d-d47987318155" Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.924345 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7jjwb"] Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.946573 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-4hqhb"] Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.946615 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-x2k78"] Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.949092 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-674sk"] Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.953736 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-zkfsl"] Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.957254 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wnk9x"] Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.962379 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-pdsxw"] Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.963605 4632 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5ptmk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-wnk9x_openstack-operators(254c38bb-3a55-426d-a497-69b3aa16c639): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.965825 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-x2252"] Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.970019 4632 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-l78lv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in 
pod swift-operator-controller-manager-5f8c65bbfc-674sk_openstack-operators(9744f748-86b6-417c-ab38-18cc3ad9b89a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.970031 4632 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-524k2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-zkfsl_openstack-operators(756b1531-b2e5-4a10-aad8-ae2378b09a68): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.970158 4632 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} 
BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-df6tw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-x2k78_openstack-operators(a9952ac0-b2d4-4717-823b-5f9f0338fb5f): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.970184 4632 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-p5gbk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-x2252_openstack-operators(697c8fad-c587-41ce-ae4a-158bb22b6394): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.970552 4632 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5ptmk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-wnk9x_openstack-operators(254c38bb-3a55-426d-a497-69b3aa16c639): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.971679 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wnk9x" podUID="254c38bb-3a55-426d-a497-69b3aa16c639" Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.972212 4632 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true 
--v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-524k2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-zkfsl_openstack-operators(756b1531-b2e5-4a10-aad8-ae2378b09a68): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.972253 4632 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-p5gbk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-x2252_openstack-operators(697c8fad-c587-41ce-ae4a-158bb22b6394): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.972332 4632 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true 
--v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-l78lv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-674sk_openstack-operators(9744f748-86b6-417c-ab38-18cc3ad9b89a): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.972867 4632 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-df6tw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-x2k78_openstack-operators(a9952ac0-b2d4-4717-823b-5f9f0338fb5f): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.974218 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-674sk" podUID="9744f748-86b6-417c-ab38-18cc3ad9b89a" Dec 01 
06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.974229 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-x2k78" podUID="a9952ac0-b2d4-4717-823b-5f9f0338fb5f" Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.974264 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-zkfsl" podUID="756b1531-b2e5-4a10-aad8-ae2378b09a68" Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.974281 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-x2252" podUID="697c8fad-c587-41ce-ae4a-158bb22b6394" Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.992539 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-r9gx9" event={"ID":"35336d69-2a15-4513-970c-19e86cbb339f","Type":"ContainerStarted","Data":"4758101a4975f04488b7c1203fe59505d0e48aaf5c9cc78b315ff60e8b679a99"} Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.993758 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-k89zc" event={"ID":"0bb7b633-65c0-4c4e-9fad-648fd779ff4a","Type":"ContainerStarted","Data":"08a3c97edfc712c6c37e63231cc507a2553b560301e6a181751e46ab51173143"} Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.995141 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-pdsxw" event={"ID":"c1fe7e47-1a47-4f9b-b0a3-cbb08a5cce73","Type":"ContainerStarted","Data":"9a352199be0dad0dfb2055311f8bfa670005ecff854f02f47920140efa173589"} Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.995838 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-g2j5h" event={"ID":"0d6924f1-38a5-434e-99b6-9f9a06ae0894","Type":"ContainerStarted","Data":"c3b9baa7fbd45af790ea2d427ca9528ee9a793199b53815ce107057c9d1363e7"} Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.997449 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-x2252" event={"ID":"697c8fad-c587-41ce-ae4a-158bb22b6394","Type":"ContainerStarted","Data":"47d57356162265a040b8570da262e8d851239cc133d6383152a9c12b69f4422f"} Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.998569 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-fcwbl" event={"ID":"768788a8-025e-4e79-a0ec-6bb23a14f72e","Type":"ContainerStarted","Data":"5c66db62e646aca84ee1b948afbe841c8f2b34aa4b08417aaf62a6726c2e3c21"} Dec 01 06:55:01 crc kubenswrapper[4632]: E1201 06:55:01.999314 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" 
with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-x2252" podUID="697c8fad-c587-41ce-ae4a-158bb22b6394" Dec 01 06:55:01 crc kubenswrapper[4632]: I1201 06:55:01.999468 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-zkfsl" event={"ID":"756b1531-b2e5-4a10-aad8-ae2378b09a68","Type":"ContainerStarted","Data":"81f729c61cfd53a1382df438ba2acf844993fb6d4ab56c28e7937c98c3342ed7"} Dec 01 06:55:02 crc kubenswrapper[4632]: I1201 06:55:02.000443 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-fxrdf" event={"ID":"c5aba6fe-c38f-45ec-8057-a19b2636fe68","Type":"ContainerStarted","Data":"228b227320eee778df381af66fbed5fde60ad339dceb6be486781333ae562cb0"} Dec 01 06:55:02 crc kubenswrapper[4632]: E1201 06:55:02.000717 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-zkfsl" podUID="756b1531-b2e5-4a10-aad8-ae2378b09a68" Dec 01 06:55:02 crc kubenswrapper[4632]: I1201 06:55:02.001693 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-4lrqg" event={"ID":"fb574298-9e57-474c-9f80-faa7be6cded8","Type":"ContainerStarted","Data":"e28c62492b11510d09f8668c0a38c169a0eb65c81fb51c42d04a6e560b808209"} Dec 01 06:55:02 crc kubenswrapper[4632]: I1201 06:55:02.002992 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-7v8l7" event={"ID":"ecb68de8-b267-4c69-baf4-078e3feacf8e","Type":"ContainerStarted","Data":"286fd1ad580b2c9a4827ca9349144a84eef1d22484801e788ab43a7008610d30"} Dec 01 06:55:02 crc kubenswrapper[4632]: I1201 06:55:02.004608 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xjrkn" event={"ID":"749f148d-477b-4186-8c5a-ea9f86e4a64b","Type":"ContainerStarted","Data":"f1fa500c3549b9d7fee04527ed38e009f33134eebb686a96cde129301c2af0aa"} Dec 01 06:55:02 crc kubenswrapper[4632]: I1201 06:55:02.005572 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-x2k78" event={"ID":"a9952ac0-b2d4-4717-823b-5f9f0338fb5f","Type":"ContainerStarted","Data":"577c367a415a6ed528d2f6103989bd3a6dc770219be65294306e4d4a05945e00"} Dec 01 06:55:02 crc kubenswrapper[4632]: I1201 06:55:02.006919 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7jjwb" event={"ID":"ce060aca-e2c3-4454-b126-719a572ece48","Type":"ContainerStarted","Data":"0ff14647ddd9ae4e131e6105d44f9cb1994e393c18ce0cf9894f7ff699f4440c"} 
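The ErrImagePull: "pull QPS exceeded" failures above are not registry errors: kubelet rate-limits image pulls per node, and starting this many operator deployments at once on a single CRC node exhausts that budget, after which the affected containers drop into ImagePullBackOff (as in the entries that follow) and are retried with increasing delay. The limit comes from the KubeletConfiguration fields registryPullQPS (default 5) and registryBurst (default 10); by default pulls are also serialized (serializeImagePulls: true). A minimal sketch of loosening the limit, assuming you manage this node's kubelet config (the field names are standard KubeletConfiguration; the values are illustrative, not a recommendation):

    apiVersion: kubelet.config.k8s.io/v1beta1
    kind: KubeletConfiguration
    registryPullQPS: 10        # default 5; 0 disables the QPS limit entirely
    registryBurst: 20          # default 10; only consulted when registryPullQPS > 0
    serializeImagePulls: false # default true: pull one image at a time

Left alone, the back-off retries visible below resolve by themselves once the pull rate falls back under the limit.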
Dec 01 06:55:02 crc kubenswrapper[4632]: E1201 06:55:02.008152 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-x2k78" podUID="a9952ac0-b2d4-4717-823b-5f9f0338fb5f" Dec 01 06:55:02 crc kubenswrapper[4632]: E1201 06:55:02.011287 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7jjwb" podUID="ce060aca-e2c3-4454-b126-719a572ece48" Dec 01 06:55:02 crc kubenswrapper[4632]: I1201 06:55:02.011307 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wnk9x" event={"ID":"254c38bb-3a55-426d-a497-69b3aa16c639","Type":"ContainerStarted","Data":"406de8f7f4f33f8e0421d137397fc813096b4dd8ce426b38f7f3d5142ff02c44"} Dec 01 06:55:02 crc kubenswrapper[4632]: I1201 06:55:02.012597 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-4hqhb" event={"ID":"5640bad0-ba52-4bc4-845d-d47987318155","Type":"ContainerStarted","Data":"3b2ff251fa55bf78f506d35b07166b9952d8e304bf7c04ee6719127b8c07412a"} Dec 01 06:55:02 crc kubenswrapper[4632]: E1201 06:55:02.012829 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wnk9x" podUID="254c38bb-3a55-426d-a497-69b3aa16c639" Dec 01 06:55:02 crc kubenswrapper[4632]: I1201 06:55:02.014453 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-7ct9v" event={"ID":"f191cde7-969a-4111-86cf-855623533060","Type":"ContainerStarted","Data":"8aae749dbcb64dc4463354d0c5516ebbfb3ef2e9432fdf855f3c69ee83f2a15b"} Dec 01 06:55:02 crc kubenswrapper[4632]: E1201 06:55:02.014465 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-4hqhb" podUID="5640bad0-ba52-4bc4-845d-d47987318155" Dec 01 06:55:02 crc kubenswrapper[4632]: I1201 06:55:02.019218 4632 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-hwjzz" event={"ID":"ad78f229-4425-4bc0-9721-fcf6c2a067d7","Type":"ContainerStarted","Data":"b535a5cef986fac9afa6abe7c524b93de4887e727649a31ea55487058b4eba3f"} Dec 01 06:55:02 crc kubenswrapper[4632]: I1201 06:55:02.022980 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-674sk" event={"ID":"9744f748-86b6-417c-ab38-18cc3ad9b89a","Type":"ContainerStarted","Data":"905bc1f8ce4862d051a28a24107db6db677d12abd18605a18c68ce09edcd0355"} Dec 01 06:55:02 crc kubenswrapper[4632]: E1201 06:55:02.026043 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-674sk" podUID="9744f748-86b6-417c-ab38-18cc3ad9b89a" Dec 01 06:55:02 crc kubenswrapper[4632]: I1201 06:55:02.026335 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-wpwjc" event={"ID":"3ad25430-83fc-45b0-83b1-adbe4e729508","Type":"ContainerStarted","Data":"d3f7bc8dbdfe40065b2219347eecc5f114b01a05707f1e848404550695970907"} Dec 01 06:55:02 crc kubenswrapper[4632]: I1201 06:55:02.030169 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-knsc7" event={"ID":"c470b4eb-c3ca-4117-89ec-5812e4cbcec1","Type":"ContainerStarted","Data":"6a0fb17e972083db8b4393cded97519eb0975de0d3db39b639d9532307068316"} Dec 01 06:55:02 crc kubenswrapper[4632]: I1201 06:55:02.124768 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/731aedb5-2e95-4d08-9a4e-6c27e64d5ea7-cert\") pod \"infra-operator-controller-manager-57548d458d-bbrm4\" (UID: \"731aedb5-2e95-4d08-9a4e-6c27e64d5ea7\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bbrm4" Dec 01 06:55:02 crc kubenswrapper[4632]: E1201 06:55:02.125421 4632 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 01 06:55:02 crc kubenswrapper[4632]: E1201 06:55:02.125560 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/731aedb5-2e95-4d08-9a4e-6c27e64d5ea7-cert podName:731aedb5-2e95-4d08-9a4e-6c27e64d5ea7 nodeName:}" failed. No retries permitted until 2025-12-01 06:55:04.125535426 +0000 UTC m=+713.690548399 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/731aedb5-2e95-4d08-9a4e-6c27e64d5ea7-cert") pod "infra-operator-controller-manager-57548d458d-bbrm4" (UID: "731aedb5-2e95-4d08-9a4e-6c27e64d5ea7") : secret "infra-operator-webhook-server-cert" not found Dec 01 06:55:02 crc kubenswrapper[4632]: I1201 06:55:02.228207 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/05397964-6686-490a-ab73-ec535a262794-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446x48x8\" (UID: \"05397964-6686-490a-ab73-ec535a262794\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446x48x8" Dec 01 06:55:02 crc kubenswrapper[4632]: E1201 06:55:02.228726 4632 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 01 06:55:02 crc kubenswrapper[4632]: E1201 06:55:02.230222 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/05397964-6686-490a-ab73-ec535a262794-cert podName:05397964-6686-490a-ab73-ec535a262794 nodeName:}" failed. No retries permitted until 2025-12-01 06:55:04.230192453 +0000 UTC m=+713.795205427 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/05397964-6686-490a-ab73-ec535a262794-cert") pod "openstack-baremetal-operator-controller-manager-6698bcb446x48x8" (UID: "05397964-6686-490a-ab73-ec535a262794") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 01 06:55:02 crc kubenswrapper[4632]: I1201 06:55:02.635524 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-dfqrf\" (UID: \"0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf" Dec 01 06:55:02 crc kubenswrapper[4632]: I1201 06:55:02.635677 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-dfqrf\" (UID: \"0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf" Dec 01 06:55:02 crc kubenswrapper[4632]: E1201 06:55:02.635704 4632 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 01 06:55:02 crc kubenswrapper[4632]: E1201 06:55:02.635792 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-webhook-certs podName:0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9 nodeName:}" failed. No retries permitted until 2025-12-01 06:55:04.63577173 +0000 UTC m=+714.200784702 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-webhook-certs") pod "openstack-operator-controller-manager-656fd97d56-dfqrf" (UID: "0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9") : secret "webhook-server-cert" not found
Dec 01 06:55:02 crc kubenswrapper[4632]: E1201 06:55:02.635822 4632 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 01 06:55:02 crc kubenswrapper[4632]: E1201 06:55:02.635957 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-metrics-certs podName:0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9 nodeName:}" failed. No retries permitted until 2025-12-01 06:55:04.635860647 +0000 UTC m=+714.200873620 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-metrics-certs") pod "openstack-operator-controller-manager-656fd97d56-dfqrf" (UID: "0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9") : secret "metrics-server-cert" not found
Dec 01 06:55:03 crc kubenswrapper[4632]: E1201 06:55:03.050390 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wnk9x" podUID="254c38bb-3a55-426d-a497-69b3aa16c639"
Dec 01 06:55:03 crc kubenswrapper[4632]: E1201 06:55:03.050675 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7jjwb" podUID="ce060aca-e2c3-4454-b126-719a572ece48"
Dec 01 06:55:03 crc kubenswrapper[4632]: E1201 06:55:03.051080 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-674sk" podUID="9744f748-86b6-417c-ab38-18cc3ad9b89a"
Dec 01 06:55:03 crc kubenswrapper[4632]: E1201 06:55:03.051937 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-4hqhb" podUID="5640bad0-ba52-4bc4-845d-d47987318155"
Dec 01 06:55:03 crc kubenswrapper[4632]: E1201 06:55:03.051993 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/placement-operator-controller-manager-78f8948974-x2k78" podUID="a9952ac0-b2d4-4717-823b-5f9f0338fb5f"
Dec 01 06:55:03 crc kubenswrapper[4632]: E1201 06:55:03.052041 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-x2252" podUID="697c8fad-c587-41ce-ae4a-158bb22b6394"
Dec 01 06:55:03 crc kubenswrapper[4632]: E1201 06:55:03.052179 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-zkfsl" podUID="756b1531-b2e5-4a10-aad8-ae2378b09a68"
Dec 01 06:55:04 crc kubenswrapper[4632]: I1201 06:55:04.179733 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/731aedb5-2e95-4d08-9a4e-6c27e64d5ea7-cert\") pod \"infra-operator-controller-manager-57548d458d-bbrm4\" (UID: \"731aedb5-2e95-4d08-9a4e-6c27e64d5ea7\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bbrm4"
Dec 01 06:55:04 crc kubenswrapper[4632]: E1201 06:55:04.179962 4632 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 01 06:55:04 crc kubenswrapper[4632]: E1201 06:55:04.180072 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/731aedb5-2e95-4d08-9a4e-6c27e64d5ea7-cert podName:731aedb5-2e95-4d08-9a4e-6c27e64d5ea7 nodeName:}" failed. No retries permitted until 2025-12-01 06:55:08.180050445 +0000 UTC m=+717.745063418 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/731aedb5-2e95-4d08-9a4e-6c27e64d5ea7-cert") pod "infra-operator-controller-manager-57548d458d-bbrm4" (UID: "731aedb5-2e95-4d08-9a4e-6c27e64d5ea7") : secret "infra-operator-webhook-server-cert" not found
Dec 01 06:55:04 crc kubenswrapper[4632]: I1201 06:55:04.281616 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/05397964-6686-490a-ab73-ec535a262794-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446x48x8\" (UID: \"05397964-6686-490a-ab73-ec535a262794\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446x48x8"
Dec 01 06:55:04 crc kubenswrapper[4632]: E1201 06:55:04.281824 4632 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 06:55:04 crc kubenswrapper[4632]: E1201 06:55:04.281908 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/05397964-6686-490a-ab73-ec535a262794-cert podName:05397964-6686-490a-ab73-ec535a262794 nodeName:}" failed. No retries permitted until 2025-12-01 06:55:08.281889093 +0000 UTC m=+717.846902067 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/05397964-6686-490a-ab73-ec535a262794-cert") pod "openstack-baremetal-operator-controller-manager-6698bcb446x48x8" (UID: "05397964-6686-490a-ab73-ec535a262794") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 06:55:04 crc kubenswrapper[4632]: I1201 06:55:04.688658 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-dfqrf\" (UID: \"0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf"
Dec 01 06:55:04 crc kubenswrapper[4632]: I1201 06:55:04.688751 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-dfqrf\" (UID: \"0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf"
Dec 01 06:55:04 crc kubenswrapper[4632]: E1201 06:55:04.688944 4632 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 01 06:55:04 crc kubenswrapper[4632]: E1201 06:55:04.688997 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-webhook-certs podName:0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9 nodeName:}" failed. No retries permitted until 2025-12-01 06:55:08.688981165 +0000 UTC m=+718.253994137 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-webhook-certs") pod "openstack-operator-controller-manager-656fd97d56-dfqrf" (UID: "0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9") : secret "webhook-server-cert" not found
Dec 01 06:55:04 crc kubenswrapper[4632]: E1201 06:55:04.689565 4632 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 01 06:55:04 crc kubenswrapper[4632]: E1201 06:55:04.689597 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-metrics-certs podName:0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9 nodeName:}" failed. No retries permitted until 2025-12-01 06:55:08.689587849 +0000 UTC m=+718.254600822 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-metrics-certs") pod "openstack-operator-controller-manager-656fd97d56-dfqrf" (UID: "0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9") : secret "metrics-server-cert" not found
Dec 01 06:55:08 crc kubenswrapper[4632]: I1201 06:55:08.264168 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/731aedb5-2e95-4d08-9a4e-6c27e64d5ea7-cert\") pod \"infra-operator-controller-manager-57548d458d-bbrm4\" (UID: \"731aedb5-2e95-4d08-9a4e-6c27e64d5ea7\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bbrm4"
Dec 01 06:55:08 crc kubenswrapper[4632]: E1201 06:55:08.264390 4632 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 01 06:55:08 crc kubenswrapper[4632]: E1201 06:55:08.264550 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/731aedb5-2e95-4d08-9a4e-6c27e64d5ea7-cert podName:731aedb5-2e95-4d08-9a4e-6c27e64d5ea7 nodeName:}" failed. No retries permitted until 2025-12-01 06:55:16.264507208 +0000 UTC m=+725.829520181 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/731aedb5-2e95-4d08-9a4e-6c27e64d5ea7-cert") pod "infra-operator-controller-manager-57548d458d-bbrm4" (UID: "731aedb5-2e95-4d08-9a4e-6c27e64d5ea7") : secret "infra-operator-webhook-server-cert" not found
Dec 01 06:55:08 crc kubenswrapper[4632]: I1201 06:55:08.365681 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/05397964-6686-490a-ab73-ec535a262794-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446x48x8\" (UID: \"05397964-6686-490a-ab73-ec535a262794\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446x48x8"
Dec 01 06:55:08 crc kubenswrapper[4632]: E1201 06:55:08.365877 4632 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 06:55:08 crc kubenswrapper[4632]: E1201 06:55:08.365949 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/05397964-6686-490a-ab73-ec535a262794-cert podName:05397964-6686-490a-ab73-ec535a262794 nodeName:}" failed. No retries permitted until 2025-12-01 06:55:16.365930319 +0000 UTC m=+725.930943292 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/05397964-6686-490a-ab73-ec535a262794-cert") pod "openstack-baremetal-operator-controller-manager-6698bcb446x48x8" (UID: "05397964-6686-490a-ab73-ec535a262794") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 01 06:55:08 crc kubenswrapper[4632]: I1201 06:55:08.771095 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-dfqrf\" (UID: \"0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf"
Dec 01 06:55:08 crc kubenswrapper[4632]: I1201 06:55:08.771217 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-dfqrf\" (UID: \"0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf"
Dec 01 06:55:08 crc kubenswrapper[4632]: E1201 06:55:08.771302 4632 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 01 06:55:08 crc kubenswrapper[4632]: E1201 06:55:08.771421 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-metrics-certs podName:0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9 nodeName:}" failed. No retries permitted until 2025-12-01 06:55:16.771393481 +0000 UTC m=+726.336406454 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-metrics-certs") pod "openstack-operator-controller-manager-656fd97d56-dfqrf" (UID: "0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9") : secret "metrics-server-cert" not found
Dec 01 06:55:08 crc kubenswrapper[4632]: E1201 06:55:08.771456 4632 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 01 06:55:08 crc kubenswrapper[4632]: E1201 06:55:08.771590 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-webhook-certs podName:0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9 nodeName:}" failed. No retries permitted until 2025-12-01 06:55:16.771562019 +0000 UTC m=+726.336574991 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-webhook-certs") pod "openstack-operator-controller-manager-656fd97d56-dfqrf" (UID: "0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9") : secret "webhook-server-cert" not found
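Note: the mount failures above retry with exponential backoff; the logged durationBeforeRetry doubles from 2s to 4s to 8s (and reaches 16s later in this log) until the missing secret appears. A minimal Go sketch of that doubling policy, assuming a 2s initial delay, a 2x factor, and a cap, all of which are illustrative parameters and not kubelet's exact configuration surface (the real logic lives in the volume manager's nestedpendingoperations):

package main

import (
	"fmt"
	"time"
)

// nextRetryDelay doubles the wait after every failed attempt, mirroring the
// durationBeforeRetry progression visible in the log (2s, 4s, 8s, 16s, ...).
// The parameters are assumptions for illustration only.
func nextRetryDelay(attempt int, initialDelay time.Duration, maxDelay time.Duration) time.Duration {
	d := initialDelay
	for i := 0; i < attempt; i++ {
		d *= 2
		if d > maxDelay {
			return maxDelay
		}
	}
	return d
}

func main() {
	for attempt := 0; attempt < 5; attempt++ {
		fmt.Printf("attempt %d: retry in %s\n", attempt, nextRetryDelay(attempt, 2*time.Second, 2*time.Minute))
	}
	// Prints: 2s, 4s, 8s, 16s, 32s -- the same doubling seen in the entries above.
}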
Dec 01 06:55:12 crc kubenswrapper[4632]: I1201 06:55:12.106327 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-k89zc" event={"ID":"0bb7b633-65c0-4c4e-9fad-648fd779ff4a","Type":"ContainerStarted","Data":"ea6accd09d01f656a6386de75e55e0fe19410e6e6f9d98b05278e897cb96999c"}
Dec 01 06:55:12 crc kubenswrapper[4632]: I1201 06:55:12.108332 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-hwjzz" event={"ID":"ad78f229-4425-4bc0-9721-fcf6c2a067d7","Type":"ContainerStarted","Data":"4c6f849a1b97bfd02c777f0178b89f4fb2dfc43a2a3dbea970184b7d899eeb3e"}
Dec 01 06:55:12 crc kubenswrapper[4632]: I1201 06:55:12.114170 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-fcwbl" event={"ID":"768788a8-025e-4e79-a0ec-6bb23a14f72e","Type":"ContainerStarted","Data":"621fb89856b63ad1361cdcf32b68f7f0e52b8df39f19d375a80f3d528d69caee"}
Dec 01 06:55:12 crc kubenswrapper[4632]: E1201 06:55:12.184455 4632 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-vxg2g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-7v8l7_openstack-operators(ecb68de8-b267-4c69-baf4-078e3feacf8e): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 01 06:55:12 crc kubenswrapper[4632]: E1201 06:55:12.186120 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-7v8l7" podUID="ecb68de8-b267-4c69-baf4-078e3feacf8e"
Dec 01 06:55:12 crc kubenswrapper[4632]: E1201 06:55:12.189024 4632 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-sz9gx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5f64f6f8bb-xjrkn_openstack-operators(749f148d-477b-4186-8c5a-ea9f86e4a64b): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 01 06:55:12 crc kubenswrapper[4632]: E1201 06:55:12.190144 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xjrkn" podUID="749f148d-477b-4186-8c5a-ea9f86e4a64b"
Dec 01 06:55:12 crc kubenswrapper[4632]: E1201 06:55:12.199326 4632 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-h7nwm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-859b6ccc6-4lrqg_openstack-operators(fb574298-9e57-474c-9f80-faa7be6cded8): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 01 06:55:12 crc kubenswrapper[4632]: E1201 06:55:12.200583 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-4lrqg" podUID="fb574298-9e57-474c-9f80-faa7be6cded8"
Dec 01 06:55:13 crc kubenswrapper[4632]: I1201 06:55:13.124579 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-g2j5h" event={"ID":"0d6924f1-38a5-434e-99b6-9f9a06ae0894","Type":"ContainerStarted","Data":"364d44d5031824b9daa65a7ba65b343b0e027e12db5d456b9b786877569f21d9"}
Dec 01 06:55:13 crc kubenswrapper[4632]: I1201 06:55:13.126731 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-fxrdf" event={"ID":"c5aba6fe-c38f-45ec-8057-a19b2636fe68","Type":"ContainerStarted","Data":"ecc8d82316dae66e2ae4dca097aae02e8747cdc923a9ead81a1afddc5866cfe4"}
Dec 01 06:55:13 crc kubenswrapper[4632]: I1201 06:55:13.129981 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-4lrqg" event={"ID":"fb574298-9e57-474c-9f80-faa7be6cded8","Type":"ContainerStarted","Data":"7a176e07462f48879d8a8ae133f9416eeafc8386d45428adfb485cd28313f78f"}
Dec 01 06:55:13 crc kubenswrapper[4632]: I1201 06:55:13.130065 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-4lrqg"
Dec 01 06:55:13 crc kubenswrapper[4632]: E1201 06:55:13.132562 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-4lrqg" podUID="fb574298-9e57-474c-9f80-faa7be6cded8"
Dec 01 06:55:13 crc kubenswrapper[4632]: I1201 06:55:13.133148 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-7v8l7" event={"ID":"ecb68de8-b267-4c69-baf4-078e3feacf8e","Type":"ContainerStarted","Data":"6d804d74f7bf0efe633f2522863d3f68f8a30f0f845f630ee932810aad9b67c2"}
Dec 01 06:55:13 crc kubenswrapper[4632]: I1201 06:55:13.133865 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-7v8l7"
Dec 01 06:55:13 crc kubenswrapper[4632]: E1201 06:55:13.139863 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-7v8l7" podUID="ecb68de8-b267-4c69-baf4-078e3feacf8e"
Dec 01 06:55:13 crc kubenswrapper[4632]: I1201 06:55:13.142781 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xjrkn" event={"ID":"749f148d-477b-4186-8c5a-ea9f86e4a64b","Type":"ContainerStarted","Data":"64ae7576add7cc6208fcbbaea1c08c45761ead40c4b8610d0f63d95b41eeb91d"}
Dec 01 06:55:13 crc kubenswrapper[4632]: I1201 06:55:13.142881 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xjrkn"
Dec 01 06:55:13 crc kubenswrapper[4632]: E1201 06:55:13.144545 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xjrkn" podUID="749f148d-477b-4186-8c5a-ea9f86e4a64b"
Dec 01 06:55:13 crc kubenswrapper[4632]: I1201 06:55:13.146908 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-r9gx9" event={"ID":"35336d69-2a15-4513-970c-19e86cbb339f","Type":"ContainerStarted","Data":"99bfb8080a14c7647131f0f80aa7729c092648c8fda484233b9a6b53ff4a71a6"}
Dec 01 06:55:13 crc kubenswrapper[4632]: I1201 06:55:13.149554 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-wpwjc" event={"ID":"3ad25430-83fc-45b0-83b1-adbe4e729508","Type":"ContainerStarted","Data":"e288ef2cc9d8c95c292c2a3a31a612111fcdae2970287707aeb572e07262f77d"}
Dec 01 06:55:13 crc kubenswrapper[4632]: I1201 06:55:13.153715 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-knsc7" event={"ID":"c470b4eb-c3ca-4117-89ec-5812e4cbcec1","Type":"ContainerStarted","Data":"9360860f28b869b69190a6ea9831b3f6ea27f158479be05b9ff21cbdb3fc731d"}
Dec 01 06:55:13 crc kubenswrapper[4632]: I1201 06:55:13.156090 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-7ct9v" event={"ID":"f191cde7-969a-4111-86cf-855623533060","Type":"ContainerStarted","Data":"22d7ce359156504b4babba138b2b3c165c12d9702e7c5d824c133af3ffaaca49"}
Dec 01 06:55:13 crc kubenswrapper[4632]: I1201 06:55:13.174097 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-pdsxw" event={"ID":"c1fe7e47-1a47-4f9b-b0a3-cbb08a5cce73","Type":"ContainerStarted","Data":"66fe2e6cdd599dbe441756970c948286f6fb0e7ff776773ef3009c9650946315"}
Dec 01 06:55:14 crc kubenswrapper[4632]: E1201 06:55:14.186447 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-4lrqg" podUID="fb574298-9e57-474c-9f80-faa7be6cded8"
Dec 01 06:55:14 crc kubenswrapper[4632]: E1201 06:55:14.186825 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xjrkn" podUID="749f148d-477b-4186-8c5a-ea9f86e4a64b"
Dec 01 06:55:14 crc kubenswrapper[4632]: E1201 06:55:14.186933 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-7v8l7" podUID="ecb68de8-b267-4c69-baf4-078e3feacf8e"
Dec 01 06:55:16 crc kubenswrapper[4632]: I1201 06:55:16.302790 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/731aedb5-2e95-4d08-9a4e-6c27e64d5ea7-cert\") pod \"infra-operator-controller-manager-57548d458d-bbrm4\" (UID: \"731aedb5-2e95-4d08-9a4e-6c27e64d5ea7\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bbrm4"
Dec 01 06:55:16 crc kubenswrapper[4632]: I1201 06:55:16.308067 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/731aedb5-2e95-4d08-9a4e-6c27e64d5ea7-cert\") pod \"infra-operator-controller-manager-57548d458d-bbrm4\" (UID: \"731aedb5-2e95-4d08-9a4e-6c27e64d5ea7\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-bbrm4"
Dec 01 06:55:16 crc kubenswrapper[4632]: I1201 06:55:16.404493 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/05397964-6686-490a-ab73-ec535a262794-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446x48x8\" (UID: \"05397964-6686-490a-ab73-ec535a262794\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446x48x8"
Dec 01 06:55:16 crc kubenswrapper[4632]: I1201 06:55:16.408624 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/05397964-6686-490a-ab73-ec535a262794-cert\") pod \"openstack-baremetal-operator-controller-manager-6698bcb446x48x8\" (UID: \"05397964-6686-490a-ab73-ec535a262794\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446x48x8"
Dec 01 06:55:16 crc kubenswrapper[4632]: I1201 06:55:16.480576 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-5xmdd"
Dec 01 06:55:16 crc kubenswrapper[4632]: I1201 06:55:16.487578 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446x48x8"
Dec 01 06:55:16 crc kubenswrapper[4632]: I1201 06:55:16.521083 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-dmbtf"
Dec 01 06:55:16 crc kubenswrapper[4632]: I1201 06:55:16.528613 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-bbrm4"
Dec 01 06:55:16 crc kubenswrapper[4632]: I1201 06:55:16.820256 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-dfqrf\" (UID: \"0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf"
Dec 01 06:55:16 crc kubenswrapper[4632]: I1201 06:55:16.820870 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-dfqrf\" (UID: \"0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf"
Dec 01 06:55:16 crc kubenswrapper[4632]: E1201 06:55:16.821047 4632 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 01 06:55:16 crc kubenswrapper[4632]: E1201 06:55:16.821093 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-webhook-certs podName:0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9 nodeName:}" failed. No retries permitted until 2025-12-01 06:55:32.821078725 +0000 UTC m=+742.386091688 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-webhook-certs") pod "openstack-operator-controller-manager-656fd97d56-dfqrf" (UID: "0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9") : secret "webhook-server-cert" not found
Dec 01 06:55:16 crc kubenswrapper[4632]: I1201 06:55:16.830950 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-metrics-certs\") pod \"openstack-operator-controller-manager-656fd97d56-dfqrf\" (UID: \"0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf"
Dec 01 06:55:16 crc kubenswrapper[4632]: I1201 06:55:16.959897 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-bbrm4"]
Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.048680 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446x48x8"]
Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.227548 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-g2j5h" event={"ID":"0d6924f1-38a5-434e-99b6-9f9a06ae0894","Type":"ContainerStarted","Data":"cd8fdccd79bf54635fab686bed0308314384ea647fbfb418255240b9066f0295"}
Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.228205 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-g2j5h"
Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.230654 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-g2j5h"
"SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-r9gx9" event={"ID":"35336d69-2a15-4513-970c-19e86cbb339f","Type":"ContainerStarted","Data":"34f60673df4f70a290bca986e2442a7a0b181c5a3cc8cbfbf44649a4e8894a98"} Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.244210 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-r9gx9" Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.251324 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-g2j5h" podStartSLOduration=2.839808438 podStartE2EDuration="17.251313648s" podCreationTimestamp="2025-12-01 06:55:00 +0000 UTC" firstStartedPulling="2025-12-01 06:55:01.813733838 +0000 UTC m=+711.378746812" lastFinishedPulling="2025-12-01 06:55:16.225239048 +0000 UTC m=+725.790252022" observedRunningTime="2025-12-01 06:55:17.248475814 +0000 UTC m=+726.813488797" watchObservedRunningTime="2025-12-01 06:55:17.251313648 +0000 UTC m=+726.816326621" Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.254669 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-x2k78" event={"ID":"a9952ac0-b2d4-4717-823b-5f9f0338fb5f","Type":"ContainerStarted","Data":"e36fda8071ab1b3a522437d68378d5df70875df394e6eaedf8bc650adcea9a84"} Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.254745 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-x2k78" event={"ID":"a9952ac0-b2d4-4717-823b-5f9f0338fb5f","Type":"ContainerStarted","Data":"95900b8dbf0b380299a1fb0b974a7d961b7a3c5786c612d5b6e489442d3ae1e5"} Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.254798 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-r9gx9" Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.254955 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-x2k78" Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.267507 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-7ct9v" event={"ID":"f191cde7-969a-4111-86cf-855623533060","Type":"ContainerStarted","Data":"89bb7c601317d077b012c0874f0ff8b775d14c1f7369493774d1188b3c26db58"} Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.268437 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-7ct9v" Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.271516 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-7ct9v" Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.271911 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wnk9x" event={"ID":"254c38bb-3a55-426d-a497-69b3aa16c639","Type":"ContainerStarted","Data":"256a109c92cfe555f4a67b16729fbfa75e58033ed6575196801d385f06629988"} Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.271963 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wnk9x" event={"ID":"254c38bb-3a55-426d-a497-69b3aa16c639","Type":"ContainerStarted","Data":"51843d3658a0ddb646594f85dc86df4fd598f9ea254abba4125314bed7e6f486"} Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.272200 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wnk9x" Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.296454 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-k89zc" event={"ID":"0bb7b633-65c0-4c4e-9fad-648fd779ff4a","Type":"ContainerStarted","Data":"3655d9b4242f35bba353dfd26c933ff85482a52525e89af063c4fe2d4f81f3f8"} Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.297165 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-k89zc" Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.304792 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-k89zc" Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.314481 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-r9gx9" podStartSLOduration=2.929732295 podStartE2EDuration="17.314469655s" podCreationTimestamp="2025-12-01 06:55:00 +0000 UTC" firstStartedPulling="2025-12-01 06:55:01.905043099 +0000 UTC m=+711.470056072" lastFinishedPulling="2025-12-01 06:55:16.289780459 +0000 UTC m=+725.854793432" observedRunningTime="2025-12-01 06:55:17.288670996 +0000 UTC m=+726.853683969" watchObservedRunningTime="2025-12-01 06:55:17.314469655 +0000 UTC m=+726.879482627" Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.329010 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-bbrm4" event={"ID":"731aedb5-2e95-4d08-9a4e-6c27e64d5ea7","Type":"ContainerStarted","Data":"fe61501024f38e84d412c3c585b19a548f3716cbde352ae2fcf46abdb546238a"} Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.337029 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-x2k78" podStartSLOduration=3.099086211 podStartE2EDuration="17.337018011s" podCreationTimestamp="2025-12-01 06:55:00 +0000 UTC" firstStartedPulling="2025-12-01 06:55:01.970085202 +0000 UTC m=+711.535098176" lastFinishedPulling="2025-12-01 06:55:16.208017002 +0000 UTC m=+725.773029976" observedRunningTime="2025-12-01 06:55:17.321010787 +0000 UTC m=+726.886023770" watchObservedRunningTime="2025-12-01 06:55:17.337018011 +0000 UTC m=+726.902030984" Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.347575 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-fcwbl" event={"ID":"768788a8-025e-4e79-a0ec-6bb23a14f72e","Type":"ContainerStarted","Data":"7ef57dbe301f2e89f08e59bd8404a9a7dce530b04c845fb0929bdc3c6754c01f"} Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.349761 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-fcwbl" Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.350738 4632 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-fcwbl" Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.352272 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-wpwjc" event={"ID":"3ad25430-83fc-45b0-83b1-adbe4e729508","Type":"ContainerStarted","Data":"cbfa073dc1ca2f3ce11e89be8e84ed48cdcc819d03bae82184efb74902c59a5c"} Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.353788 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-wpwjc" Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.357712 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-wpwjc" Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.358641 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-fxrdf" event={"ID":"c5aba6fe-c38f-45ec-8057-a19b2636fe68","Type":"ContainerStarted","Data":"79cdb24e1aeb745922f4646dceeb0c2d4e9ca1225b0928f64c3675ea91738523"} Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.359114 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-fxrdf" Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.360379 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-fxrdf" Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.360933 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446x48x8" event={"ID":"05397964-6686-490a-ab73-ec535a262794","Type":"ContainerStarted","Data":"f9b63fe20fc1fdc7519266ea106bcbe15b6c83cbd4cc02c0ea421b9ffb84d58b"} Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.363113 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-knsc7" event={"ID":"c470b4eb-c3ca-4117-89ec-5812e4cbcec1","Type":"ContainerStarted","Data":"051309c5f7f881a2480aa0b87d1f58565cfcb8adccc5c889dabbcef0bba63d07"} Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.363923 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-knsc7" Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.365836 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-knsc7" Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.371531 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-pdsxw" event={"ID":"c1fe7e47-1a47-4f9b-b0a3-cbb08a5cce73","Type":"ContainerStarted","Data":"0dcf9081e23da0536e5c31c3c89575cf86d624b31323474b970bd38df394761b"} Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.372149 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-pdsxw" Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.373345 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/nova-operator-controller-manager-697bc559fc-pdsxw" Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.373556 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wnk9x" podStartSLOduration=3.119199309 podStartE2EDuration="17.373534331s" podCreationTimestamp="2025-12-01 06:55:00 +0000 UTC" firstStartedPulling="2025-12-01 06:55:01.963454821 +0000 UTC m=+711.528467794" lastFinishedPulling="2025-12-01 06:55:16.217789842 +0000 UTC m=+725.782802816" observedRunningTime="2025-12-01 06:55:17.349645537 +0000 UTC m=+726.914658510" watchObservedRunningTime="2025-12-01 06:55:17.373534331 +0000 UTC m=+726.938547305" Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.396479 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-hwjzz" event={"ID":"ad78f229-4425-4bc0-9721-fcf6c2a067d7","Type":"ContainerStarted","Data":"204b4f0a80e1aa8492075b214b3e0294468ed881c7ba6d1d7a3309b11b1814e8"} Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.397070 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-hwjzz" Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.402721 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-hwjzz" Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.413857 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-668d9c48b9-k89zc" podStartSLOduration=2.605115819 podStartE2EDuration="17.413844091s" podCreationTimestamp="2025-12-01 06:55:00 +0000 UTC" firstStartedPulling="2025-12-01 06:55:01.457816415 +0000 UTC m=+711.022829388" lastFinishedPulling="2025-12-01 06:55:16.266544687 +0000 UTC m=+725.831557660" observedRunningTime="2025-12-01 06:55:17.412677079 +0000 UTC m=+726.977690051" watchObservedRunningTime="2025-12-01 06:55:17.413844091 +0000 UTC m=+726.978857064" Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.414416 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-546d4bdf48-7ct9v" podStartSLOduration=2.955396486 podStartE2EDuration="17.414409998s" podCreationTimestamp="2025-12-01 06:55:00 +0000 UTC" firstStartedPulling="2025-12-01 06:55:01.81953264 +0000 UTC m=+711.384545613" lastFinishedPulling="2025-12-01 06:55:16.278546151 +0000 UTC m=+725.843559125" observedRunningTime="2025-12-01 06:55:17.396644567 +0000 UTC m=+726.961657540" watchObservedRunningTime="2025-12-01 06:55:17.414409998 +0000 UTC m=+726.979422971" Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.433489 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-6546668bfd-fcwbl" podStartSLOduration=2.9463218639999997 podStartE2EDuration="17.43347951s" podCreationTimestamp="2025-12-01 06:55:00 +0000 UTC" firstStartedPulling="2025-12-01 06:55:01.732392487 +0000 UTC m=+711.297405461" lastFinishedPulling="2025-12-01 06:55:16.219550135 +0000 UTC m=+725.784563107" observedRunningTime="2025-12-01 06:55:17.428619289 +0000 UTC m=+726.993632263" watchObservedRunningTime="2025-12-01 06:55:17.43347951 +0000 UTC m=+726.998492483" Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.451410 4632 
Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.451410 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-knsc7" podStartSLOduration=2.952374441 podStartE2EDuration="17.451387009s" podCreationTimestamp="2025-12-01 06:55:00 +0000 UTC" firstStartedPulling="2025-12-01 06:55:01.705842067 +0000 UTC m=+711.270855031" lastFinishedPulling="2025-12-01 06:55:16.204854626 +0000 UTC m=+725.769867599" observedRunningTime="2025-12-01 06:55:17.451255632 +0000 UTC m=+727.016268604" watchObservedRunningTime="2025-12-01 06:55:17.451387009 +0000 UTC m=+727.016399982"
Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.469817 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-wpwjc" podStartSLOduration=2.616471281 podStartE2EDuration="17.469801404s" podCreationTimestamp="2025-12-01 06:55:00 +0000 UTC" firstStartedPulling="2025-12-01 06:55:01.351642065 +0000 UTC m=+710.916655037" lastFinishedPulling="2025-12-01 06:55:16.204972188 +0000 UTC m=+725.769985160" observedRunningTime="2025-12-01 06:55:17.468085908 +0000 UTC m=+727.033098880" watchObservedRunningTime="2025-12-01 06:55:17.469801404 +0000 UTC m=+727.034814377"
Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.514968 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-fxrdf" podStartSLOduration=2.6080318609999997 podStartE2EDuration="17.51495358s" podCreationTimestamp="2025-12-01 06:55:00 +0000 UTC" firstStartedPulling="2025-12-01 06:55:01.38966831 +0000 UTC m=+710.954681283" lastFinishedPulling="2025-12-01 06:55:16.296590028 +0000 UTC m=+725.861603002" observedRunningTime="2025-12-01 06:55:17.514331817 +0000 UTC m=+727.079344791" watchObservedRunningTime="2025-12-01 06:55:17.51495358 +0000 UTC m=+727.079966553"
Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.515643 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-pdsxw" podStartSLOduration=3.242158165 podStartE2EDuration="17.515638282s" podCreationTimestamp="2025-12-01 06:55:00 +0000 UTC" firstStartedPulling="2025-12-01 06:55:01.951761636 +0000 UTC m=+711.516774609" lastFinishedPulling="2025-12-01 06:55:16.225241754 +0000 UTC m=+725.790254726" observedRunningTime="2025-12-01 06:55:17.495267695 +0000 UTC m=+727.060280669" watchObservedRunningTime="2025-12-01 06:55:17.515638282 +0000 UTC m=+727.080651255"
Dec 01 06:55:17 crc kubenswrapper[4632]: I1201 06:55:17.538459 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-hwjzz" podStartSLOduration=3.045015015 podStartE2EDuration="17.538433824s" podCreationTimestamp="2025-12-01 06:55:00 +0000 UTC" firstStartedPulling="2025-12-01 06:55:01.737367666 +0000 UTC m=+711.302380639" lastFinishedPulling="2025-12-01 06:55:16.230786475 +0000 UTC m=+725.795799448" observedRunningTime="2025-12-01 06:55:17.537841276 +0000 UTC m=+727.102854249" watchObservedRunningTime="2025-12-01 06:55:17.538433824 +0000 UTC m=+727.103446798"
Dec 01 06:55:19 crc kubenswrapper[4632]: I1201 06:55:19.413750 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-zkfsl" event={"ID":"756b1531-b2e5-4a10-aad8-ae2378b09a68","Type":"ContainerStarted","Data":"a525a1bbd1eff88c4658e14e1d77ba66c469b30625240f40524a13c17c95f635"}
Dec 01 06:55:19 crc kubenswrapper[4632]: I1201 06:55:19.414088 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-zkfsl" event={"ID":"756b1531-b2e5-4a10-aad8-ae2378b09a68","Type":"ContainerStarted","Data":"f1a4c1e2912977f16e2d22dd9747de09e17c7917b317134973c5e5e653a20aad"}
Dec 01 06:55:19 crc kubenswrapper[4632]: I1201 06:55:19.414317 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-zkfsl"
Dec 01 06:55:19 crc kubenswrapper[4632]: I1201 06:55:19.417866 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-674sk" event={"ID":"9744f748-86b6-417c-ab38-18cc3ad9b89a","Type":"ContainerStarted","Data":"e5e56657142bcef325c8ad096ded9f2d84e81e38407d440e232c0664d18cfe8d"}
Dec 01 06:55:19 crc kubenswrapper[4632]: I1201 06:55:19.431045 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-zkfsl" podStartSLOduration=2.9467903890000002 podStartE2EDuration="19.431011935s" podCreationTimestamp="2025-12-01 06:55:00 +0000 UTC" firstStartedPulling="2025-12-01 06:55:01.96990882 +0000 UTC m=+711.534921793" lastFinishedPulling="2025-12-01 06:55:18.454130365 +0000 UTC m=+728.019143339" observedRunningTime="2025-12-01 06:55:19.427683094 +0000 UTC m=+728.992696068" watchObservedRunningTime="2025-12-01 06:55:19.431011935 +0000 UTC m=+728.996024908"
Dec 01 06:55:19 crc kubenswrapper[4632]: I1201 06:55:19.499168 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 06:55:19 crc kubenswrapper[4632]: I1201 06:55:19.499241 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 06:55:20 crc kubenswrapper[4632]: I1201 06:55:20.495991 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-4lrqg"
Dec 01 06:55:20 crc kubenswrapper[4632]: I1201 06:55:20.569244 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xjrkn"
Dec 01 06:55:20 crc kubenswrapper[4632]: I1201 06:55:20.862395 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-7v8l7"
Dec 01 06:55:23 crc kubenswrapper[4632]: I1201 06:55:23.465396 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xjrkn" event={"ID":"749f148d-477b-4186-8c5a-ea9f86e4a64b","Type":"ContainerStarted","Data":"9c5524fdc56ad2f9f7b83a4c5f0e610a5bbb4968dca0883c1aa6bbb4587115b7"}
event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-4hqhb" event={"ID":"5640bad0-ba52-4bc4-845d-d47987318155","Type":"ContainerStarted","Data":"50fe249efb9f96cbc3be46889b934480877148aefa9f256befe8af7c650df798"} Dec 01 06:55:23 crc kubenswrapper[4632]: I1201 06:55:23.468117 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-4hqhb" event={"ID":"5640bad0-ba52-4bc4-845d-d47987318155","Type":"ContainerStarted","Data":"c34972a2761e4302930452e582db5f7e7b521213119e75b1cf1b9ecded2a90ab"} Dec 01 06:55:23 crc kubenswrapper[4632]: I1201 06:55:23.468285 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-4hqhb" Dec 01 06:55:23 crc kubenswrapper[4632]: I1201 06:55:23.470238 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-x2252" event={"ID":"697c8fad-c587-41ce-ae4a-158bb22b6394","Type":"ContainerStarted","Data":"1feb9e5faaca036c9e5c8142e666c164c4d30113ddb7f25f7e9d6b79487183b3"} Dec 01 06:55:23 crc kubenswrapper[4632]: I1201 06:55:23.470292 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-x2252" event={"ID":"697c8fad-c587-41ce-ae4a-158bb22b6394","Type":"ContainerStarted","Data":"69afc2b1423100e703189c13a9e8c82edde3ce168d91735a8cd2dbdc3870d961"} Dec 01 06:55:23 crc kubenswrapper[4632]: I1201 06:55:23.470473 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-x2252" Dec 01 06:55:23 crc kubenswrapper[4632]: I1201 06:55:23.471631 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7jjwb" event={"ID":"ce060aca-e2c3-4454-b126-719a572ece48","Type":"ContainerStarted","Data":"a02643ddd4037d9747a73fd7e41659600e68a7e7e4ff4498591fb85fab470703"} Dec 01 06:55:23 crc kubenswrapper[4632]: I1201 06:55:23.473201 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-4lrqg" event={"ID":"fb574298-9e57-474c-9f80-faa7be6cded8","Type":"ContainerStarted","Data":"6b349ecd3e52cf0d98e2f15f105a9176c9b7e2343cdd46c80e1f40442e9c5eaa"} Dec 01 06:55:23 crc kubenswrapper[4632]: I1201 06:55:23.474957 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-7v8l7" event={"ID":"ecb68de8-b267-4c69-baf4-078e3feacf8e","Type":"ContainerStarted","Data":"e2459a3cf19733cf117b821d912d45e1c0ae339ac7e4e1083b5d51b9f2526f27"} Dec 01 06:55:23 crc kubenswrapper[4632]: I1201 06:55:23.477666 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-bbrm4" event={"ID":"731aedb5-2e95-4d08-9a4e-6c27e64d5ea7","Type":"ContainerStarted","Data":"c4a89caee81ad2aa7092e3cfe24486bab655756bd8032657b0f717373c759798"} Dec 01 06:55:23 crc kubenswrapper[4632]: I1201 06:55:23.477704 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-bbrm4" event={"ID":"731aedb5-2e95-4d08-9a4e-6c27e64d5ea7","Type":"ContainerStarted","Data":"91bed204371a156d99b82346bbea29a3e1273f31ed803a42635cfbcae7e95b8a"} Dec 01 06:55:23 crc kubenswrapper[4632]: I1201 06:55:23.483911 4632 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-674sk" event={"ID":"9744f748-86b6-417c-ab38-18cc3ad9b89a","Type":"ContainerStarted","Data":"1702f50648035af621b272abb1d51965efc47a1c475b831f85b84d13eb06b15d"} Dec 01 06:55:23 crc kubenswrapper[4632]: I1201 06:55:23.484056 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-674sk" Dec 01 06:55:23 crc kubenswrapper[4632]: I1201 06:55:23.485592 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446x48x8" event={"ID":"05397964-6686-490a-ab73-ec535a262794","Type":"ContainerStarted","Data":"f155eea94a4e9207b1d97dd6171d0a5154f5e05a42b652ebe3d9974df5ca5cb5"} Dec 01 06:55:23 crc kubenswrapper[4632]: I1201 06:55:23.485630 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446x48x8" event={"ID":"05397964-6686-490a-ab73-ec535a262794","Type":"ContainerStarted","Data":"6ed1ae62ca3e4e2e7c6a42c4cb954ea147b6d40440f4075d3c02c1d0bcd0c4a5"} Dec 01 06:55:23 crc kubenswrapper[4632]: I1201 06:55:23.485728 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446x48x8" Dec 01 06:55:23 crc kubenswrapper[4632]: I1201 06:55:23.486253 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-xjrkn" podStartSLOduration=13.552546872 podStartE2EDuration="23.486241518s" podCreationTimestamp="2025-12-01 06:55:00 +0000 UTC" firstStartedPulling="2025-12-01 06:55:01.820545482 +0000 UTC m=+711.385558454" lastFinishedPulling="2025-12-01 06:55:11.754240127 +0000 UTC m=+721.319253100" observedRunningTime="2025-12-01 06:55:23.48347617 +0000 UTC m=+733.048489143" watchObservedRunningTime="2025-12-01 06:55:23.486241518 +0000 UTC m=+733.051254491" Dec 01 06:55:23 crc kubenswrapper[4632]: I1201 06:55:23.487101 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-674sk" Dec 01 06:55:23 crc kubenswrapper[4632]: I1201 06:55:23.504191 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-4lrqg" podStartSLOduration=13.11990756 podStartE2EDuration="23.504178863s" podCreationTimestamp="2025-12-01 06:55:00 +0000 UTC" firstStartedPulling="2025-12-01 06:55:01.354816594 +0000 UTC m=+710.919829567" lastFinishedPulling="2025-12-01 06:55:11.739087896 +0000 UTC m=+721.304100870" observedRunningTime="2025-12-01 06:55:23.501942804 +0000 UTC m=+733.066955778" watchObservedRunningTime="2025-12-01 06:55:23.504178863 +0000 UTC m=+733.069191836" Dec 01 06:55:23 crc kubenswrapper[4632]: I1201 06:55:23.532762 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-7v8l7" podStartSLOduration=13.701302618 podStartE2EDuration="23.532738903s" podCreationTimestamp="2025-12-01 06:55:00 +0000 UTC" firstStartedPulling="2025-12-01 06:55:01.906333874 +0000 UTC m=+711.471346838" lastFinishedPulling="2025-12-01 06:55:11.73777015 +0000 UTC m=+721.302783123" observedRunningTime="2025-12-01 06:55:23.530977669 +0000 UTC m=+733.095990652" watchObservedRunningTime="2025-12-01 06:55:23.532738903 +0000 UTC 
m=+733.097751875" Dec 01 06:55:23 crc kubenswrapper[4632]: I1201 06:55:23.556205 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-7jjwb" podStartSLOduration=3.124753924 podStartE2EDuration="23.55618404s" podCreationTimestamp="2025-12-01 06:55:00 +0000 UTC" firstStartedPulling="2025-12-01 06:55:01.906697581 +0000 UTC m=+711.471710554" lastFinishedPulling="2025-12-01 06:55:22.338127697 +0000 UTC m=+731.903140670" observedRunningTime="2025-12-01 06:55:23.54996737 +0000 UTC m=+733.114980343" watchObservedRunningTime="2025-12-01 06:55:23.55618404 +0000 UTC m=+733.121197013" Dec 01 06:55:23 crc kubenswrapper[4632]: I1201 06:55:23.564596 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-x2252" podStartSLOduration=3.207627293 podStartE2EDuration="23.564581946s" podCreationTimestamp="2025-12-01 06:55:00 +0000 UTC" firstStartedPulling="2025-12-01 06:55:01.970122253 +0000 UTC m=+711.535135226" lastFinishedPulling="2025-12-01 06:55:22.327076906 +0000 UTC m=+731.892089879" observedRunningTime="2025-12-01 06:55:23.56426109 +0000 UTC m=+733.129274063" watchObservedRunningTime="2025-12-01 06:55:23.564581946 +0000 UTC m=+733.129594918" Dec 01 06:55:23 crc kubenswrapper[4632]: I1201 06:55:23.577022 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-4hqhb" podStartSLOduration=3.15673017 podStartE2EDuration="23.577000237s" podCreationTimestamp="2025-12-01 06:55:00 +0000 UTC" firstStartedPulling="2025-12-01 06:55:01.917880643 +0000 UTC m=+711.482893616" lastFinishedPulling="2025-12-01 06:55:22.33815071 +0000 UTC m=+731.903163683" observedRunningTime="2025-12-01 06:55:23.575188559 +0000 UTC m=+733.140201542" watchObservedRunningTime="2025-12-01 06:55:23.577000237 +0000 UTC m=+733.142013210" Dec 01 06:55:23 crc kubenswrapper[4632]: I1201 06:55:23.595449 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-bbrm4" podStartSLOduration=18.272397832 podStartE2EDuration="23.59542937s" podCreationTimestamp="2025-12-01 06:55:00 +0000 UTC" firstStartedPulling="2025-12-01 06:55:16.993492446 +0000 UTC m=+726.558505419" lastFinishedPulling="2025-12-01 06:55:22.316523984 +0000 UTC m=+731.881536957" observedRunningTime="2025-12-01 06:55:23.589688879 +0000 UTC m=+733.154701852" watchObservedRunningTime="2025-12-01 06:55:23.59542937 +0000 UTC m=+733.160442343" Dec 01 06:55:23 crc kubenswrapper[4632]: I1201 06:55:23.617618 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446x48x8" podStartSLOduration=18.413789719 podStartE2EDuration="23.617605062s" podCreationTimestamp="2025-12-01 06:55:00 +0000 UTC" firstStartedPulling="2025-12-01 06:55:17.106722275 +0000 UTC m=+726.671735248" lastFinishedPulling="2025-12-01 06:55:22.310537619 +0000 UTC m=+731.875550591" observedRunningTime="2025-12-01 06:55:23.614509041 +0000 UTC m=+733.179522014" watchObservedRunningTime="2025-12-01 06:55:23.617605062 +0000 UTC m=+733.182618035" Dec 01 06:55:23 crc kubenswrapper[4632]: I1201 06:55:23.631558 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-674sk" podStartSLOduration=7.14896135 
podStartE2EDuration="23.631537261s" podCreationTimestamp="2025-12-01 06:55:00 +0000 UTC" firstStartedPulling="2025-12-01 06:55:01.969906215 +0000 UTC m=+711.534919188" lastFinishedPulling="2025-12-01 06:55:18.452482126 +0000 UTC m=+728.017495099" observedRunningTime="2025-12-01 06:55:23.628963144 +0000 UTC m=+733.193976116" watchObservedRunningTime="2025-12-01 06:55:23.631537261 +0000 UTC m=+733.196550234" Dec 01 06:55:24 crc kubenswrapper[4632]: I1201 06:55:24.492227 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-bbrm4" Dec 01 06:55:30 crc kubenswrapper[4632]: I1201 06:55:30.786275 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-wnk9x" Dec 01 06:55:30 crc kubenswrapper[4632]: I1201 06:55:30.894121 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-x2k78" Dec 01 06:55:30 crc kubenswrapper[4632]: I1201 06:55:30.923893 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-x2252" Dec 01 06:55:31 crc kubenswrapper[4632]: I1201 06:55:31.031483 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-4hqhb" Dec 01 06:55:31 crc kubenswrapper[4632]: I1201 06:55:31.133118 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-zkfsl" Dec 01 06:55:32 crc kubenswrapper[4632]: I1201 06:55:32.885679 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-dfqrf\" (UID: \"0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf" Dec 01 06:55:32 crc kubenswrapper[4632]: I1201 06:55:32.890760 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9-webhook-certs\") pod \"openstack-operator-controller-manager-656fd97d56-dfqrf\" (UID: \"0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9\") " pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf" Dec 01 06:55:33 crc kubenswrapper[4632]: I1201 06:55:33.004914 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-rpdct" Dec 01 06:55:33 crc kubenswrapper[4632]: I1201 06:55:33.014298 4632 util.go:30] "No sandbox for pod can be found. 
Dec 01 06:55:33 crc kubenswrapper[4632]: I1201 06:55:33.408928 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf"]
Dec 01 06:55:33 crc kubenswrapper[4632]: W1201 06:55:33.414523 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0b8afb8f_7d54_43b7_80ec_ff5e2ee806e9.slice/crio-a5d6a874080458180c38c0d3978ae84fd829359ac188b0c052c004aa9d9bbb0b WatchSource:0}: Error finding container a5d6a874080458180c38c0d3978ae84fd829359ac188b0c052c004aa9d9bbb0b: Status 404 returned error can't find the container with id a5d6a874080458180c38c0d3978ae84fd829359ac188b0c052c004aa9d9bbb0b
Dec 01 06:55:33 crc kubenswrapper[4632]: I1201 06:55:33.555133 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf" event={"ID":"0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9","Type":"ContainerStarted","Data":"9ce3c0e05609e82470ab4565b46411570d97134a1d9a466a5d1d829340cf5771"}
Dec 01 06:55:33 crc kubenswrapper[4632]: I1201 06:55:33.555187 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf" event={"ID":"0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9","Type":"ContainerStarted","Data":"a5d6a874080458180c38c0d3978ae84fd829359ac188b0c052c004aa9d9bbb0b"}
Dec 01 06:55:33 crc kubenswrapper[4632]: I1201 06:55:33.555289 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf"
Dec 01 06:55:33 crc kubenswrapper[4632]: I1201 06:55:33.577282 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf" podStartSLOduration=33.577268138 podStartE2EDuration="33.577268138s" podCreationTimestamp="2025-12-01 06:55:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:55:33.573872581 +0000 UTC m=+743.138885573" watchObservedRunningTime="2025-12-01 06:55:33.577268138 +0000 UTC m=+743.142281111"
Dec 01 06:55:35 crc kubenswrapper[4632]: I1201 06:55:35.421416 4632 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 01 06:55:36 crc kubenswrapper[4632]: I1201 06:55:36.494323 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6698bcb446x48x8"
Dec 01 06:55:36 crc kubenswrapper[4632]: I1201 06:55:36.536632 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-bbrm4"
Dec 01 06:55:43 crc kubenswrapper[4632]: I1201 06:55:43.019407 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-656fd97d56-dfqrf"
Dec 01 06:55:49 crc kubenswrapper[4632]: I1201 06:55:49.498137 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 06:55:49 crc kubenswrapper[4632]: I1201 06:55:49.498912 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 06:55:49 crc kubenswrapper[4632]: I1201 06:55:49.498967 4632 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs"
Dec 01 06:55:49 crc kubenswrapper[4632]: I1201 06:55:49.499551 4632 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e2c37d74168c2266d0e222524d08e21b2b2ded4712e89e7f6ccc10e73c3e4637"} pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 01 06:55:49 crc kubenswrapper[4632]: I1201 06:55:49.499619 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" containerID="cri-o://e2c37d74168c2266d0e222524d08e21b2b2ded4712e89e7f6ccc10e73c3e4637" gracePeriod=600
Dec 01 06:55:49 crc kubenswrapper[4632]: I1201 06:55:49.669845 4632 generic.go:334] "Generic (PLEG): container finished" podID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerID="e2c37d74168c2266d0e222524d08e21b2b2ded4712e89e7f6ccc10e73c3e4637" exitCode=0
Dec 01 06:55:49 crc kubenswrapper[4632]: I1201 06:55:49.669909 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerDied","Data":"e2c37d74168c2266d0e222524d08e21b2b2ded4712e89e7f6ccc10e73c3e4637"}
Dec 01 06:55:49 crc kubenswrapper[4632]: I1201 06:55:49.670173 4632 scope.go:117] "RemoveContainer" containerID="ee7f539e03195ed0ab8d1a700f3bda3f809d44f758cd7a5f013919000dd76f35"
Dec 01 06:55:50 crc kubenswrapper[4632]: I1201 06:55:50.680267 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerStarted","Data":"1813c53cd60de5bffa918f4bfd6831c16142699025e22227df4ba9598c98e491"}
Dec 01 06:55:59 crc kubenswrapper[4632]: I1201 06:55:59.829808 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-d4c79f9f7-g2t4b"]
Dec 01 06:55:59 crc kubenswrapper[4632]: I1201 06:55:59.831458 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d4c79f9f7-g2t4b"
Dec 01 06:55:59 crc kubenswrapper[4632]: I1201 06:55:59.833223 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt"
Dec 01 06:55:59 crc kubenswrapper[4632]: I1201 06:55:59.833611 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns"
Dec 01 06:55:59 crc kubenswrapper[4632]: I1201 06:55:59.833752 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-vdpsm"
Dec 01 06:55:59 crc kubenswrapper[4632]: I1201 06:55:59.833825 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt"
Dec 01 06:55:59 crc kubenswrapper[4632]: I1201 06:55:59.849728 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d4c79f9f7-g2t4b"]
Dec 01 06:55:59 crc kubenswrapper[4632]: I1201 06:55:59.867026 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-c8f8bcb7c-sx4lh"]
Dec 01 06:55:59 crc kubenswrapper[4632]: I1201 06:55:59.868363 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c8f8bcb7c-sx4lh"
Dec 01 06:55:59 crc kubenswrapper[4632]: I1201 06:55:59.870029 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc"
Dec 01 06:55:59 crc kubenswrapper[4632]: I1201 06:55:59.882403 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-c8f8bcb7c-sx4lh"]
Dec 01 06:55:59 crc kubenswrapper[4632]: I1201 06:55:59.884632 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/659fe06a-35e7-425e-ac75-e4e1169cc7bf-dns-svc\") pod \"dnsmasq-dns-c8f8bcb7c-sx4lh\" (UID: \"659fe06a-35e7-425e-ac75-e4e1169cc7bf\") " pod="openstack/dnsmasq-dns-c8f8bcb7c-sx4lh"
Dec 01 06:55:59 crc kubenswrapper[4632]: I1201 06:55:59.884672 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5dlgd\" (UniqueName: \"kubernetes.io/projected/659fe06a-35e7-425e-ac75-e4e1169cc7bf-kube-api-access-5dlgd\") pod \"dnsmasq-dns-c8f8bcb7c-sx4lh\" (UID: \"659fe06a-35e7-425e-ac75-e4e1169cc7bf\") " pod="openstack/dnsmasq-dns-c8f8bcb7c-sx4lh"
Dec 01 06:55:59 crc kubenswrapper[4632]: I1201 06:55:59.884724 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/659fe06a-35e7-425e-ac75-e4e1169cc7bf-config\") pod \"dnsmasq-dns-c8f8bcb7c-sx4lh\" (UID: \"659fe06a-35e7-425e-ac75-e4e1169cc7bf\") " pod="openstack/dnsmasq-dns-c8f8bcb7c-sx4lh"
Dec 01 06:55:59 crc kubenswrapper[4632]: I1201 06:55:59.884793 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpzqm\" (UniqueName: \"kubernetes.io/projected/093dff7e-f87e-48a1-93b2-1b8af26cbc15-kube-api-access-zpzqm\") pod \"dnsmasq-dns-d4c79f9f7-g2t4b\" (UID: \"093dff7e-f87e-48a1-93b2-1b8af26cbc15\") " pod="openstack/dnsmasq-dns-d4c79f9f7-g2t4b"
Dec 01 06:55:59 crc kubenswrapper[4632]: I1201 06:55:59.884827 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/093dff7e-f87e-48a1-93b2-1b8af26cbc15-config\") pod \"dnsmasq-dns-d4c79f9f7-g2t4b\" (UID: \"093dff7e-f87e-48a1-93b2-1b8af26cbc15\") " pod="openstack/dnsmasq-dns-d4c79f9f7-g2t4b"
Dec 01 06:55:59 crc kubenswrapper[4632]: I1201 06:55:59.986902 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/659fe06a-35e7-425e-ac75-e4e1169cc7bf-dns-svc\") pod \"dnsmasq-dns-c8f8bcb7c-sx4lh\" (UID: \"659fe06a-35e7-425e-ac75-e4e1169cc7bf\") " pod="openstack/dnsmasq-dns-c8f8bcb7c-sx4lh"
Dec 01 06:55:59 crc kubenswrapper[4632]: I1201 06:55:59.986978 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5dlgd\" (UniqueName: \"kubernetes.io/projected/659fe06a-35e7-425e-ac75-e4e1169cc7bf-kube-api-access-5dlgd\") pod \"dnsmasq-dns-c8f8bcb7c-sx4lh\" (UID: \"659fe06a-35e7-425e-ac75-e4e1169cc7bf\") " pod="openstack/dnsmasq-dns-c8f8bcb7c-sx4lh"
Dec 01 06:55:59 crc kubenswrapper[4632]: I1201 06:55:59.987037 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/659fe06a-35e7-425e-ac75-e4e1169cc7bf-config\") pod \"dnsmasq-dns-c8f8bcb7c-sx4lh\" (UID: \"659fe06a-35e7-425e-ac75-e4e1169cc7bf\") " pod="openstack/dnsmasq-dns-c8f8bcb7c-sx4lh"
Dec 01 06:55:59 crc kubenswrapper[4632]: I1201 06:55:59.987156 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpzqm\" (UniqueName: \"kubernetes.io/projected/093dff7e-f87e-48a1-93b2-1b8af26cbc15-kube-api-access-zpzqm\") pod \"dnsmasq-dns-d4c79f9f7-g2t4b\" (UID: \"093dff7e-f87e-48a1-93b2-1b8af26cbc15\") " pod="openstack/dnsmasq-dns-d4c79f9f7-g2t4b"
Dec 01 06:55:59 crc kubenswrapper[4632]: I1201 06:55:59.987204 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/093dff7e-f87e-48a1-93b2-1b8af26cbc15-config\") pod \"dnsmasq-dns-d4c79f9f7-g2t4b\" (UID: \"093dff7e-f87e-48a1-93b2-1b8af26cbc15\") " pod="openstack/dnsmasq-dns-d4c79f9f7-g2t4b"
Dec 01 06:55:59 crc kubenswrapper[4632]: I1201 06:55:59.988326 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/659fe06a-35e7-425e-ac75-e4e1169cc7bf-dns-svc\") pod \"dnsmasq-dns-c8f8bcb7c-sx4lh\" (UID: \"659fe06a-35e7-425e-ac75-e4e1169cc7bf\") " pod="openstack/dnsmasq-dns-c8f8bcb7c-sx4lh"
Dec 01 06:55:59 crc kubenswrapper[4632]: I1201 06:55:59.988660 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/093dff7e-f87e-48a1-93b2-1b8af26cbc15-config\") pod \"dnsmasq-dns-d4c79f9f7-g2t4b\" (UID: \"093dff7e-f87e-48a1-93b2-1b8af26cbc15\") " pod="openstack/dnsmasq-dns-d4c79f9f7-g2t4b"
Dec 01 06:55:59 crc kubenswrapper[4632]: I1201 06:55:59.988674 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/659fe06a-35e7-425e-ac75-e4e1169cc7bf-config\") pod \"dnsmasq-dns-c8f8bcb7c-sx4lh\" (UID: \"659fe06a-35e7-425e-ac75-e4e1169cc7bf\") " pod="openstack/dnsmasq-dns-c8f8bcb7c-sx4lh"
Dec 01 06:56:00 crc kubenswrapper[4632]: I1201 06:56:00.006950 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpzqm\" (UniqueName: \"kubernetes.io/projected/093dff7e-f87e-48a1-93b2-1b8af26cbc15-kube-api-access-zpzqm\") pod \"dnsmasq-dns-d4c79f9f7-g2t4b\" (UID: \"093dff7e-f87e-48a1-93b2-1b8af26cbc15\") " pod="openstack/dnsmasq-dns-d4c79f9f7-g2t4b"
Dec 01 06:56:00 crc kubenswrapper[4632]: I1201 06:56:00.006969 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5dlgd\" (UniqueName: \"kubernetes.io/projected/659fe06a-35e7-425e-ac75-e4e1169cc7bf-kube-api-access-5dlgd\") pod \"dnsmasq-dns-c8f8bcb7c-sx4lh\" (UID: \"659fe06a-35e7-425e-ac75-e4e1169cc7bf\") " pod="openstack/dnsmasq-dns-c8f8bcb7c-sx4lh"
Dec 01 06:56:00 crc kubenswrapper[4632]: I1201 06:56:00.163995 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d4c79f9f7-g2t4b"
Dec 01 06:56:00 crc kubenswrapper[4632]: I1201 06:56:00.179797 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c8f8bcb7c-sx4lh"
Dec 01 06:56:00 crc kubenswrapper[4632]: I1201 06:56:00.545480 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-d4c79f9f7-g2t4b"]
Dec 01 06:56:00 crc kubenswrapper[4632]: I1201 06:56:00.589090 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-c8f8bcb7c-sx4lh"]
Dec 01 06:56:00 crc kubenswrapper[4632]: W1201 06:56:00.589657 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod659fe06a_35e7_425e_ac75_e4e1169cc7bf.slice/crio-f76c1aebd453cb7bfd01df2ea04538c956272c98a20c84f9ea16b84cf13c37f5 WatchSource:0}: Error finding container f76c1aebd453cb7bfd01df2ea04538c956272c98a20c84f9ea16b84cf13c37f5: Status 404 returned error can't find the container with id f76c1aebd453cb7bfd01df2ea04538c956272c98a20c84f9ea16b84cf13c37f5
Dec 01 06:56:00 crc kubenswrapper[4632]: I1201 06:56:00.761591 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d4c79f9f7-g2t4b" event={"ID":"093dff7e-f87e-48a1-93b2-1b8af26cbc15","Type":"ContainerStarted","Data":"47c722d3f933384608a3a5df06545fda6be3567a772f5c55e167b79f37bd0b85"}
Dec 01 06:56:00 crc kubenswrapper[4632]: I1201 06:56:00.761655 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c8f8bcb7c-sx4lh" event={"ID":"659fe06a-35e7-425e-ac75-e4e1169cc7bf","Type":"ContainerStarted","Data":"f76c1aebd453cb7bfd01df2ea04538c956272c98a20c84f9ea16b84cf13c37f5"}
Dec 01 06:56:02 crc kubenswrapper[4632]: I1201 06:56:02.901407 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-c8f8bcb7c-sx4lh"]
Dec 01 06:56:02 crc kubenswrapper[4632]: I1201 06:56:02.921635 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-f75b6684c-v6km2"]
Dec 01 06:56:02 crc kubenswrapper[4632]: I1201 06:56:02.924225 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f75b6684c-v6km2"
Dec 01 06:56:02 crc kubenswrapper[4632]: I1201 06:56:02.940778 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f75b6684c-v6km2"]
Dec 01 06:56:02 crc kubenswrapper[4632]: I1201 06:56:02.944455 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e5fcf062-7331-4a42-8f87-cb6fba34de17-dns-svc\") pod \"dnsmasq-dns-f75b6684c-v6km2\" (UID: \"e5fcf062-7331-4a42-8f87-cb6fba34de17\") " pod="openstack/dnsmasq-dns-f75b6684c-v6km2"
Dec 01 06:56:02 crc kubenswrapper[4632]: I1201 06:56:02.944506 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dljm5\" (UniqueName: \"kubernetes.io/projected/e5fcf062-7331-4a42-8f87-cb6fba34de17-kube-api-access-dljm5\") pod \"dnsmasq-dns-f75b6684c-v6km2\" (UID: \"e5fcf062-7331-4a42-8f87-cb6fba34de17\") " pod="openstack/dnsmasq-dns-f75b6684c-v6km2"
Dec 01 06:56:02 crc kubenswrapper[4632]: I1201 06:56:02.944603 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5fcf062-7331-4a42-8f87-cb6fba34de17-config\") pod \"dnsmasq-dns-f75b6684c-v6km2\" (UID: \"e5fcf062-7331-4a42-8f87-cb6fba34de17\") " pod="openstack/dnsmasq-dns-f75b6684c-v6km2"
Dec 01 06:56:03 crc kubenswrapper[4632]: I1201 06:56:03.046256 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5fcf062-7331-4a42-8f87-cb6fba34de17-config\") pod \"dnsmasq-dns-f75b6684c-v6km2\" (UID: \"e5fcf062-7331-4a42-8f87-cb6fba34de17\") " pod="openstack/dnsmasq-dns-f75b6684c-v6km2"
Dec 01 06:56:03 crc kubenswrapper[4632]: I1201 06:56:03.046475 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e5fcf062-7331-4a42-8f87-cb6fba34de17-dns-svc\") pod \"dnsmasq-dns-f75b6684c-v6km2\" (UID: \"e5fcf062-7331-4a42-8f87-cb6fba34de17\") " pod="openstack/dnsmasq-dns-f75b6684c-v6km2"
Dec 01 06:56:03 crc kubenswrapper[4632]: I1201 06:56:03.046506 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dljm5\" (UniqueName: \"kubernetes.io/projected/e5fcf062-7331-4a42-8f87-cb6fba34de17-kube-api-access-dljm5\") pod \"dnsmasq-dns-f75b6684c-v6km2\" (UID: \"e5fcf062-7331-4a42-8f87-cb6fba34de17\") " pod="openstack/dnsmasq-dns-f75b6684c-v6km2"
Dec 01 06:56:03 crc kubenswrapper[4632]: I1201 06:56:03.047889 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e5fcf062-7331-4a42-8f87-cb6fba34de17-dns-svc\") pod \"dnsmasq-dns-f75b6684c-v6km2\" (UID: \"e5fcf062-7331-4a42-8f87-cb6fba34de17\") " pod="openstack/dnsmasq-dns-f75b6684c-v6km2"
Dec 01 06:56:03 crc kubenswrapper[4632]: I1201 06:56:03.047957 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5fcf062-7331-4a42-8f87-cb6fba34de17-config\") pod \"dnsmasq-dns-f75b6684c-v6km2\" (UID: \"e5fcf062-7331-4a42-8f87-cb6fba34de17\") " pod="openstack/dnsmasq-dns-f75b6684c-v6km2"
Dec 01 06:56:03 crc kubenswrapper[4632]: I1201 06:56:03.067597 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dljm5\" (UniqueName: \"kubernetes.io/projected/e5fcf062-7331-4a42-8f87-cb6fba34de17-kube-api-access-dljm5\") pod \"dnsmasq-dns-f75b6684c-v6km2\" (UID: \"e5fcf062-7331-4a42-8f87-cb6fba34de17\") " pod="openstack/dnsmasq-dns-f75b6684c-v6km2"
Dec 01 06:56:03 crc kubenswrapper[4632]: I1201 06:56:03.189561 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d4c79f9f7-g2t4b"]
Dec 01 06:56:03 crc kubenswrapper[4632]: I1201 06:56:03.207452 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-687bbf6d65-czr5n"]
Dec 01 06:56:03 crc kubenswrapper[4632]: I1201 06:56:03.208775 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-687bbf6d65-czr5n"
Dec 01 06:56:03 crc kubenswrapper[4632]: I1201 06:56:03.223837 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-687bbf6d65-czr5n"]
Dec 01 06:56:03 crc kubenswrapper[4632]: I1201 06:56:03.246196 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f75b6684c-v6km2"
Dec 01 06:56:03 crc kubenswrapper[4632]: I1201 06:56:03.250256 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f8f3547-1296-4a5b-b1a5-427ee1c1d763-config\") pod \"dnsmasq-dns-687bbf6d65-czr5n\" (UID: \"5f8f3547-1296-4a5b-b1a5-427ee1c1d763\") " pod="openstack/dnsmasq-dns-687bbf6d65-czr5n"
Dec 01 06:56:03 crc kubenswrapper[4632]: I1201 06:56:03.250395 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8g6hj\" (UniqueName: \"kubernetes.io/projected/5f8f3547-1296-4a5b-b1a5-427ee1c1d763-kube-api-access-8g6hj\") pod \"dnsmasq-dns-687bbf6d65-czr5n\" (UID: \"5f8f3547-1296-4a5b-b1a5-427ee1c1d763\") " pod="openstack/dnsmasq-dns-687bbf6d65-czr5n"
Dec 01 06:56:03 crc kubenswrapper[4632]: I1201 06:56:03.250443 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5f8f3547-1296-4a5b-b1a5-427ee1c1d763-dns-svc\") pod \"dnsmasq-dns-687bbf6d65-czr5n\" (UID: \"5f8f3547-1296-4a5b-b1a5-427ee1c1d763\") " pod="openstack/dnsmasq-dns-687bbf6d65-czr5n"
Dec 01 06:56:03 crc kubenswrapper[4632]: I1201 06:56:03.351672 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f8f3547-1296-4a5b-b1a5-427ee1c1d763-config\") pod \"dnsmasq-dns-687bbf6d65-czr5n\" (UID: \"5f8f3547-1296-4a5b-b1a5-427ee1c1d763\") " pod="openstack/dnsmasq-dns-687bbf6d65-czr5n"
Dec 01 06:56:03 crc kubenswrapper[4632]: I1201 06:56:03.351753 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8g6hj\" (UniqueName: \"kubernetes.io/projected/5f8f3547-1296-4a5b-b1a5-427ee1c1d763-kube-api-access-8g6hj\") pod \"dnsmasq-dns-687bbf6d65-czr5n\" (UID: \"5f8f3547-1296-4a5b-b1a5-427ee1c1d763\") " pod="openstack/dnsmasq-dns-687bbf6d65-czr5n"
Dec 01 06:56:03 crc kubenswrapper[4632]: I1201 06:56:03.351802 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5f8f3547-1296-4a5b-b1a5-427ee1c1d763-dns-svc\") pod \"dnsmasq-dns-687bbf6d65-czr5n\" (UID: \"5f8f3547-1296-4a5b-b1a5-427ee1c1d763\") " pod="openstack/dnsmasq-dns-687bbf6d65-czr5n"
Dec 01 06:56:03 crc kubenswrapper[4632]: I1201 06:56:03.352763 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5f8f3547-1296-4a5b-b1a5-427ee1c1d763-dns-svc\") pod \"dnsmasq-dns-687bbf6d65-czr5n\" (UID: \"5f8f3547-1296-4a5b-b1a5-427ee1c1d763\") " pod="openstack/dnsmasq-dns-687bbf6d65-czr5n"
Dec 01 06:56:03 crc kubenswrapper[4632]: I1201 06:56:03.353488 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f8f3547-1296-4a5b-b1a5-427ee1c1d763-config\") pod \"dnsmasq-dns-687bbf6d65-czr5n\" (UID: \"5f8f3547-1296-4a5b-b1a5-427ee1c1d763\") " pod="openstack/dnsmasq-dns-687bbf6d65-czr5n"
Dec 01 06:56:03 crc kubenswrapper[4632]: I1201 06:56:03.373720 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8g6hj\" (UniqueName: \"kubernetes.io/projected/5f8f3547-1296-4a5b-b1a5-427ee1c1d763-kube-api-access-8g6hj\") pod \"dnsmasq-dns-687bbf6d65-czr5n\" (UID: \"5f8f3547-1296-4a5b-b1a5-427ee1c1d763\") " pod="openstack/dnsmasq-dns-687bbf6d65-czr5n"
Dec 01 06:56:03 crc kubenswrapper[4632]: I1201 06:56:03.532037 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-687bbf6d65-czr5n"
Dec 01 06:56:03 crc kubenswrapper[4632]: I1201 06:56:03.702269 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f75b6684c-v6km2"]
Dec 01 06:56:03 crc kubenswrapper[4632]: I1201 06:56:03.785171 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f75b6684c-v6km2" event={"ID":"e5fcf062-7331-4a42-8f87-cb6fba34de17","Type":"ContainerStarted","Data":"35a56697bf06ba4b8b0a48c6cfe6c5ed1bfc5d80b8cefe76562fc7e9a5109271"}
Dec 01 06:56:03 crc kubenswrapper[4632]: I1201 06:56:03.954982 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-687bbf6d65-czr5n"]
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.083192 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.098606 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.104527 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.104997 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.105185 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.105651 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.107836 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.108565 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.109183 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-6s96d"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.119402 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.213672 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ff6e2f27-a2b5-4046-8e0e-dc495271a359-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.213725 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.213781 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ff6e2f27-a2b5-4046-8e0e-dc495271a359-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.213801 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ff6e2f27-a2b5-4046-8e0e-dc495271a359-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.213816 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ff6e2f27-a2b5-4046-8e0e-dc495271a359-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.213837 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ff6e2f27-a2b5-4046-8e0e-dc495271a359-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.213857 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ff6e2f27-a2b5-4046-8e0e-dc495271a359-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.213984 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ff6e2f27-a2b5-4046-8e0e-dc495271a359-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.214071 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92w4z\" (UniqueName: \"kubernetes.io/projected/ff6e2f27-a2b5-4046-8e0e-dc495271a359-kube-api-access-92w4z\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.214123 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ff6e2f27-a2b5-4046-8e0e-dc495271a359-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.214158 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ff6e2f27-a2b5-4046-8e0e-dc495271a359-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.315964 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ff6e2f27-a2b5-4046-8e0e-dc495271a359-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.316011 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ff6e2f27-a2b5-4046-8e0e-dc495271a359-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.316032 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ff6e2f27-a2b5-4046-8e0e-dc495271a359-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.316048 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ff6e2f27-a2b5-4046-8e0e-dc495271a359-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0"
(UniqueName: \"kubernetes.io/projected/ff6e2f27-a2b5-4046-8e0e-dc495271a359-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.316072 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ff6e2f27-a2b5-4046-8e0e-dc495271a359-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.316096 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ff6e2f27-a2b5-4046-8e0e-dc495271a359-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.316156 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92w4z\" (UniqueName: \"kubernetes.io/projected/ff6e2f27-a2b5-4046-8e0e-dc495271a359-kube-api-access-92w4z\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.316644 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ff6e2f27-a2b5-4046-8e0e-dc495271a359-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.316722 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ff6e2f27-a2b5-4046-8e0e-dc495271a359-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.316935 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ff6e2f27-a2b5-4046-8e0e-dc495271a359-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.316969 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.319141 4632 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-cell1-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.320394 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ff6e2f27-a2b5-4046-8e0e-dc495271a359-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.321993 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ff6e2f27-a2b5-4046-8e0e-dc495271a359-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.322207 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ff6e2f27-a2b5-4046-8e0e-dc495271a359-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.323275 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ff6e2f27-a2b5-4046-8e0e-dc495271a359-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.323928 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ff6e2f27-a2b5-4046-8e0e-dc495271a359-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.326271 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.326827 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ff6e2f27-a2b5-4046-8e0e-dc495271a359-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.327140 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ff6e2f27-a2b5-4046-8e0e-dc495271a359-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.327830 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.333184 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.333286 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.333474 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.333829 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-g6b89" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.333965 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ff6e2f27-a2b5-4046-8e0e-dc495271a359-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.334106 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.334121 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.334584 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.336038 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ff6e2f27-a2b5-4046-8e0e-dc495271a359-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.340336 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92w4z\" (UniqueName: \"kubernetes.io/projected/ff6e2f27-a2b5-4046-8e0e-dc495271a359-kube-api-access-92w4z\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.342710 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.352641 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.419921 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/67043517-303b-4159-a030-1192c39b98dd-config-data\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.420254 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/67043517-303b-4159-a030-1192c39b98dd-server-conf\") pod \"rabbitmq-server-0\" (UID: 
\"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.420304 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/67043517-303b-4159-a030-1192c39b98dd-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.420324 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjg6x\" (UniqueName: \"kubernetes.io/projected/67043517-303b-4159-a030-1192c39b98dd-kube-api-access-sjg6x\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.420458 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/67043517-303b-4159-a030-1192c39b98dd-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.420603 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.420667 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/67043517-303b-4159-a030-1192c39b98dd-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.420741 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/67043517-303b-4159-a030-1192c39b98dd-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.420790 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/67043517-303b-4159-a030-1192c39b98dd-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.420854 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/67043517-303b-4159-a030-1192c39b98dd-pod-info\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0" Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.420895 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/67043517-303b-4159-a030-1192c39b98dd-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0" 
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.423327 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.522645 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/67043517-303b-4159-a030-1192c39b98dd-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.522713 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjg6x\" (UniqueName: \"kubernetes.io/projected/67043517-303b-4159-a030-1192c39b98dd-kube-api-access-sjg6x\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.522741 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/67043517-303b-4159-a030-1192c39b98dd-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.522789 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.522819 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/67043517-303b-4159-a030-1192c39b98dd-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.522851 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/67043517-303b-4159-a030-1192c39b98dd-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.522878 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/67043517-303b-4159-a030-1192c39b98dd-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.522915 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/67043517-303b-4159-a030-1192c39b98dd-pod-info\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.522942 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/67043517-303b-4159-a030-1192c39b98dd-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.522959 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/67043517-303b-4159-a030-1192c39b98dd-config-data\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.522982 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/67043517-303b-4159-a030-1192c39b98dd-server-conf\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.525031 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/67043517-303b-4159-a030-1192c39b98dd-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.525316 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/67043517-303b-4159-a030-1192c39b98dd-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.525493 4632 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/rabbitmq-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.526324 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/67043517-303b-4159-a030-1192c39b98dd-config-data\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.526929 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/67043517-303b-4159-a030-1192c39b98dd-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.528172 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/67043517-303b-4159-a030-1192c39b98dd-server-conf\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.528154 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/67043517-303b-4159-a030-1192c39b98dd-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.529963 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/67043517-303b-4159-a030-1192c39b98dd-pod-info\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.531677 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/67043517-303b-4159-a030-1192c39b98dd-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.532083 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/67043517-303b-4159-a030-1192c39b98dd-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.540876 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjg6x\" (UniqueName: \"kubernetes.io/projected/67043517-303b-4159-a030-1192c39b98dd-kube-api-access-sjg6x\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.547753 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-server-0\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") " pod="openstack/rabbitmq-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.709279 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.791407 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 01 06:56:04 crc kubenswrapper[4632]: I1201 06:56:04.799999 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-687bbf6d65-czr5n" event={"ID":"5f8f3547-1296-4a5b-b1a5-427ee1c1d763","Type":"ContainerStarted","Data":"d236698dc0ec71aa1df96763c76b02e36d6f16685645769d14dce321d8ff527a"}
Dec 01 06:56:04 crc kubenswrapper[4632]: W1201 06:56:04.806575 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podff6e2f27_a2b5_4046_8e0e_dc495271a359.slice/crio-1492c7d3d20d10bed7fab58ccf81080919eb8c1d64c2bb4b2460261cff552550 WatchSource:0}: Error finding container 1492c7d3d20d10bed7fab58ccf81080919eb8c1d64c2bb4b2460261cff552550: Status 404 returned error can't find the container with id 1492c7d3d20d10bed7fab58ccf81080919eb8c1d64c2bb4b2460261cff552550
Dec 01 06:56:05 crc kubenswrapper[4632]: I1201 06:56:05.154287 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 01 06:56:05 crc kubenswrapper[4632]: W1201 06:56:05.164038 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod67043517_303b_4159_a030_1192c39b98dd.slice/crio-eca8d2a80ee7de999e2e461fff68c8ca8c684bc394de05c91609f36c3cad9193 WatchSource:0}: Error finding container eca8d2a80ee7de999e2e461fff68c8ca8c684bc394de05c91609f36c3cad9193: Status 404 returned error can't find the container with id eca8d2a80ee7de999e2e461fff68c8ca8c684bc394de05c91609f36c3cad9193
Dec 01 06:56:05 crc kubenswrapper[4632]: I1201 06:56:05.818331 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ff6e2f27-a2b5-4046-8e0e-dc495271a359","Type":"ContainerStarted","Data":"1492c7d3d20d10bed7fab58ccf81080919eb8c1d64c2bb4b2460261cff552550"}
Dec 01 06:56:05 crc kubenswrapper[4632]: I1201 06:56:05.819875 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"67043517-303b-4159-a030-1192c39b98dd","Type":"ContainerStarted","Data":"eca8d2a80ee7de999e2e461fff68c8ca8c684bc394de05c91609f36c3cad9193"}
Dec 01 06:56:05 crc kubenswrapper[4632]: I1201 06:56:05.968773 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"]
Dec 01 06:56:05 crc kubenswrapper[4632]: I1201 06:56:05.970471 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Dec 01 06:56:05 crc kubenswrapper[4632]: I1201 06:56:05.975298 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc"
Dec 01 06:56:05 crc kubenswrapper[4632]: I1201 06:56:05.976372 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-nbp7s"
Dec 01 06:56:05 crc kubenswrapper[4632]: I1201 06:56:05.977399 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts"
Dec 01 06:56:05 crc kubenswrapper[4632]: I1201 06:56:05.977478 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data"
Dec 01 06:56:05 crc kubenswrapper[4632]: I1201 06:56:05.979034 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Dec 01 06:56:05 crc kubenswrapper[4632]: I1201 06:56:05.983048 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle"
Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.058982 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecd36c5b-96fc-49af-b8f2-634fcf854cfa-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"ecd36c5b-96fc-49af-b8f2-634fcf854cfa\") " pod="openstack/openstack-galera-0"
Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.059105 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtjsp\" (UniqueName: \"kubernetes.io/projected/ecd36c5b-96fc-49af-b8f2-634fcf854cfa-kube-api-access-vtjsp\") pod \"openstack-galera-0\" (UID: \"ecd36c5b-96fc-49af-b8f2-634fcf854cfa\") " pod="openstack/openstack-galera-0"
Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.059169 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"ecd36c5b-96fc-49af-b8f2-634fcf854cfa\") " pod="openstack/openstack-galera-0"
Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.059199 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ecd36c5b-96fc-49af-b8f2-634fcf854cfa-operator-scripts\") pod \"openstack-galera-0\" (UID: \"ecd36c5b-96fc-49af-b8f2-634fcf854cfa\") " pod="openstack/openstack-galera-0"
Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.059228 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: 
\"kubernetes.io/empty-dir/ecd36c5b-96fc-49af-b8f2-634fcf854cfa-config-data-generated\") pod \"openstack-galera-0\" (UID: \"ecd36c5b-96fc-49af-b8f2-634fcf854cfa\") " pod="openstack/openstack-galera-0" Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.059271 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/ecd36c5b-96fc-49af-b8f2-634fcf854cfa-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"ecd36c5b-96fc-49af-b8f2-634fcf854cfa\") " pod="openstack/openstack-galera-0" Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.059304 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ecd36c5b-96fc-49af-b8f2-634fcf854cfa-kolla-config\") pod \"openstack-galera-0\" (UID: \"ecd36c5b-96fc-49af-b8f2-634fcf854cfa\") " pod="openstack/openstack-galera-0" Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.059331 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ecd36c5b-96fc-49af-b8f2-634fcf854cfa-config-data-default\") pod \"openstack-galera-0\" (UID: \"ecd36c5b-96fc-49af-b8f2-634fcf854cfa\") " pod="openstack/openstack-galera-0" Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.160697 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/ecd36c5b-96fc-49af-b8f2-634fcf854cfa-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"ecd36c5b-96fc-49af-b8f2-634fcf854cfa\") " pod="openstack/openstack-galera-0" Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.160762 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ecd36c5b-96fc-49af-b8f2-634fcf854cfa-kolla-config\") pod \"openstack-galera-0\" (UID: \"ecd36c5b-96fc-49af-b8f2-634fcf854cfa\") " pod="openstack/openstack-galera-0" Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.160827 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ecd36c5b-96fc-49af-b8f2-634fcf854cfa-config-data-default\") pod \"openstack-galera-0\" (UID: \"ecd36c5b-96fc-49af-b8f2-634fcf854cfa\") " pod="openstack/openstack-galera-0" Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.160885 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecd36c5b-96fc-49af-b8f2-634fcf854cfa-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"ecd36c5b-96fc-49af-b8f2-634fcf854cfa\") " pod="openstack/openstack-galera-0" Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.160943 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtjsp\" (UniqueName: \"kubernetes.io/projected/ecd36c5b-96fc-49af-b8f2-634fcf854cfa-kube-api-access-vtjsp\") pod \"openstack-galera-0\" (UID: \"ecd36c5b-96fc-49af-b8f2-634fcf854cfa\") " pod="openstack/openstack-galera-0" Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.160991 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"ecd36c5b-96fc-49af-b8f2-634fcf854cfa\") " 
pod="openstack/openstack-galera-0" Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.161044 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ecd36c5b-96fc-49af-b8f2-634fcf854cfa-operator-scripts\") pod \"openstack-galera-0\" (UID: \"ecd36c5b-96fc-49af-b8f2-634fcf854cfa\") " pod="openstack/openstack-galera-0" Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.161078 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/ecd36c5b-96fc-49af-b8f2-634fcf854cfa-config-data-generated\") pod \"openstack-galera-0\" (UID: \"ecd36c5b-96fc-49af-b8f2-634fcf854cfa\") " pod="openstack/openstack-galera-0" Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.161747 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ecd36c5b-96fc-49af-b8f2-634fcf854cfa-kolla-config\") pod \"openstack-galera-0\" (UID: \"ecd36c5b-96fc-49af-b8f2-634fcf854cfa\") " pod="openstack/openstack-galera-0" Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.161827 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/ecd36c5b-96fc-49af-b8f2-634fcf854cfa-config-data-generated\") pod \"openstack-galera-0\" (UID: \"ecd36c5b-96fc-49af-b8f2-634fcf854cfa\") " pod="openstack/openstack-galera-0" Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.161943 4632 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"ecd36c5b-96fc-49af-b8f2-634fcf854cfa\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/openstack-galera-0" Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.162043 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ecd36c5b-96fc-49af-b8f2-634fcf854cfa-config-data-default\") pod \"openstack-galera-0\" (UID: \"ecd36c5b-96fc-49af-b8f2-634fcf854cfa\") " pod="openstack/openstack-galera-0" Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.164463 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ecd36c5b-96fc-49af-b8f2-634fcf854cfa-operator-scripts\") pod \"openstack-galera-0\" (UID: \"ecd36c5b-96fc-49af-b8f2-634fcf854cfa\") " pod="openstack/openstack-galera-0" Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.167630 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecd36c5b-96fc-49af-b8f2-634fcf854cfa-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"ecd36c5b-96fc-49af-b8f2-634fcf854cfa\") " pod="openstack/openstack-galera-0" Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.168663 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/ecd36c5b-96fc-49af-b8f2-634fcf854cfa-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"ecd36c5b-96fc-49af-b8f2-634fcf854cfa\") " pod="openstack/openstack-galera-0" Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.175428 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtjsp\" (UniqueName: 
\"kubernetes.io/projected/ecd36c5b-96fc-49af-b8f2-634fcf854cfa-kube-api-access-vtjsp\") pod \"openstack-galera-0\" (UID: \"ecd36c5b-96fc-49af-b8f2-634fcf854cfa\") " pod="openstack/openstack-galera-0" Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.200486 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"ecd36c5b-96fc-49af-b8f2-634fcf854cfa\") " pod="openstack/openstack-galera-0" Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.297579 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.744027 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 01 06:56:06 crc kubenswrapper[4632]: W1201 06:56:06.760858 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podecd36c5b_96fc_49af_b8f2_634fcf854cfa.slice/crio-f4fc419d39a9ec7771cae32bc7ed4a34e6e8cb74b4f293ed033ae7df817a7af5 WatchSource:0}: Error finding container f4fc419d39a9ec7771cae32bc7ed4a34e6e8cb74b4f293ed033ae7df817a7af5: Status 404 returned error can't find the container with id f4fc419d39a9ec7771cae32bc7ed4a34e6e8cb74b4f293ed033ae7df817a7af5 Dec 01 06:56:06 crc kubenswrapper[4632]: I1201 06:56:06.833519 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"ecd36c5b-96fc-49af-b8f2-634fcf854cfa","Type":"ContainerStarted","Data":"f4fc419d39a9ec7771cae32bc7ed4a34e6e8cb74b4f293ed033ae7df817a7af5"} Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.268946 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.270869 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.274348 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.274715 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.274729 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.274718 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-wjvff" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.275607 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.387082 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-cell1-galera-0\" (UID: \"d73ee8ba-1384-40b9-bbe8-62425cd044db\") " pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.387341 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d73ee8ba-1384-40b9-bbe8-62425cd044db-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"d73ee8ba-1384-40b9-bbe8-62425cd044db\") " pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.387467 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d73ee8ba-1384-40b9-bbe8-62425cd044db-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"d73ee8ba-1384-40b9-bbe8-62425cd044db\") " pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.387613 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d73ee8ba-1384-40b9-bbe8-62425cd044db-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"d73ee8ba-1384-40b9-bbe8-62425cd044db\") " pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.387716 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpzzp\" (UniqueName: \"kubernetes.io/projected/d73ee8ba-1384-40b9-bbe8-62425cd044db-kube-api-access-gpzzp\") pod \"openstack-cell1-galera-0\" (UID: \"d73ee8ba-1384-40b9-bbe8-62425cd044db\") " pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.387748 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d73ee8ba-1384-40b9-bbe8-62425cd044db-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"d73ee8ba-1384-40b9-bbe8-62425cd044db\") " pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.387833 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/d73ee8ba-1384-40b9-bbe8-62425cd044db-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"d73ee8ba-1384-40b9-bbe8-62425cd044db\") " pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.387939 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d73ee8ba-1384-40b9-bbe8-62425cd044db-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"d73ee8ba-1384-40b9-bbe8-62425cd044db\") " pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.490477 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d73ee8ba-1384-40b9-bbe8-62425cd044db-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"d73ee8ba-1384-40b9-bbe8-62425cd044db\") " pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.490581 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d73ee8ba-1384-40b9-bbe8-62425cd044db-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"d73ee8ba-1384-40b9-bbe8-62425cd044db\") " pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.490630 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-cell1-galera-0\" (UID: \"d73ee8ba-1384-40b9-bbe8-62425cd044db\") " pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.490725 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d73ee8ba-1384-40b9-bbe8-62425cd044db-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"d73ee8ba-1384-40b9-bbe8-62425cd044db\") " pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.490792 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d73ee8ba-1384-40b9-bbe8-62425cd044db-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"d73ee8ba-1384-40b9-bbe8-62425cd044db\") " pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.490820 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d73ee8ba-1384-40b9-bbe8-62425cd044db-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"d73ee8ba-1384-40b9-bbe8-62425cd044db\") " pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.490865 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpzzp\" (UniqueName: \"kubernetes.io/projected/d73ee8ba-1384-40b9-bbe8-62425cd044db-kube-api-access-gpzzp\") pod \"openstack-cell1-galera-0\" (UID: \"d73ee8ba-1384-40b9-bbe8-62425cd044db\") " pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.490887 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: 
\"kubernetes.io/configmap/d73ee8ba-1384-40b9-bbe8-62425cd044db-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"d73ee8ba-1384-40b9-bbe8-62425cd044db\") " pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.491773 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d73ee8ba-1384-40b9-bbe8-62425cd044db-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"d73ee8ba-1384-40b9-bbe8-62425cd044db\") " pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.492000 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d73ee8ba-1384-40b9-bbe8-62425cd044db-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"d73ee8ba-1384-40b9-bbe8-62425cd044db\") " pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.492196 4632 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-cell1-galera-0\" (UID: \"d73ee8ba-1384-40b9-bbe8-62425cd044db\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.495268 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d73ee8ba-1384-40b9-bbe8-62425cd044db-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"d73ee8ba-1384-40b9-bbe8-62425cd044db\") " pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.501516 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d73ee8ba-1384-40b9-bbe8-62425cd044db-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"d73ee8ba-1384-40b9-bbe8-62425cd044db\") " pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.504340 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d73ee8ba-1384-40b9-bbe8-62425cd044db-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"d73ee8ba-1384-40b9-bbe8-62425cd044db\") " pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.516272 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpzzp\" (UniqueName: \"kubernetes.io/projected/d73ee8ba-1384-40b9-bbe8-62425cd044db-kube-api-access-gpzzp\") pod \"openstack-cell1-galera-0\" (UID: \"d73ee8ba-1384-40b9-bbe8-62425cd044db\") " pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.530942 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d73ee8ba-1384-40b9-bbe8-62425cd044db-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"d73ee8ba-1384-40b9-bbe8-62425cd044db\") " pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.535484 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"openstack-cell1-galera-0\" (UID: 
\"d73ee8ba-1384-40b9-bbe8-62425cd044db\") " pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.557050 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.558566 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.564179 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.565677 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.565761 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-2lt9q" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.576588 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.588796 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.600208 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/df701889-0ecf-4452-8689-40cc4c4de347-config-data\") pod \"memcached-0\" (UID: \"df701889-0ecf-4452-8689-40cc4c4de347\") " pod="openstack/memcached-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.600412 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/df701889-0ecf-4452-8689-40cc4c4de347-memcached-tls-certs\") pod \"memcached-0\" (UID: \"df701889-0ecf-4452-8689-40cc4c4de347\") " pod="openstack/memcached-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.600440 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df701889-0ecf-4452-8689-40cc4c4de347-combined-ca-bundle\") pod \"memcached-0\" (UID: \"df701889-0ecf-4452-8689-40cc4c4de347\") " pod="openstack/memcached-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.601243 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcwwm\" (UniqueName: \"kubernetes.io/projected/df701889-0ecf-4452-8689-40cc4c4de347-kube-api-access-gcwwm\") pod \"memcached-0\" (UID: \"df701889-0ecf-4452-8689-40cc4c4de347\") " pod="openstack/memcached-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.601289 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/df701889-0ecf-4452-8689-40cc4c4de347-kolla-config\") pod \"memcached-0\" (UID: \"df701889-0ecf-4452-8689-40cc4c4de347\") " pod="openstack/memcached-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.703211 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/df701889-0ecf-4452-8689-40cc4c4de347-memcached-tls-certs\") pod \"memcached-0\" (UID: \"df701889-0ecf-4452-8689-40cc4c4de347\") " pod="openstack/memcached-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.703259 4632 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df701889-0ecf-4452-8689-40cc4c4de347-combined-ca-bundle\") pod \"memcached-0\" (UID: \"df701889-0ecf-4452-8689-40cc4c4de347\") " pod="openstack/memcached-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.703312 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcwwm\" (UniqueName: \"kubernetes.io/projected/df701889-0ecf-4452-8689-40cc4c4de347-kube-api-access-gcwwm\") pod \"memcached-0\" (UID: \"df701889-0ecf-4452-8689-40cc4c4de347\") " pod="openstack/memcached-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.703343 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/df701889-0ecf-4452-8689-40cc4c4de347-kolla-config\") pod \"memcached-0\" (UID: \"df701889-0ecf-4452-8689-40cc4c4de347\") " pod="openstack/memcached-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.703428 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/df701889-0ecf-4452-8689-40cc4c4de347-config-data\") pod \"memcached-0\" (UID: \"df701889-0ecf-4452-8689-40cc4c4de347\") " pod="openstack/memcached-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.704370 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/df701889-0ecf-4452-8689-40cc4c4de347-kolla-config\") pod \"memcached-0\" (UID: \"df701889-0ecf-4452-8689-40cc4c4de347\") " pod="openstack/memcached-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.704439 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/df701889-0ecf-4452-8689-40cc4c4de347-config-data\") pod \"memcached-0\" (UID: \"df701889-0ecf-4452-8689-40cc4c4de347\") " pod="openstack/memcached-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.707448 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df701889-0ecf-4452-8689-40cc4c4de347-combined-ca-bundle\") pod \"memcached-0\" (UID: \"df701889-0ecf-4452-8689-40cc4c4de347\") " pod="openstack/memcached-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.716348 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/df701889-0ecf-4452-8689-40cc4c4de347-memcached-tls-certs\") pod \"memcached-0\" (UID: \"df701889-0ecf-4452-8689-40cc4c4de347\") " pod="openstack/memcached-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.721784 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcwwm\" (UniqueName: \"kubernetes.io/projected/df701889-0ecf-4452-8689-40cc4c4de347-kube-api-access-gcwwm\") pod \"memcached-0\" (UID: \"df701889-0ecf-4452-8689-40cc4c4de347\") " pod="openstack/memcached-0" Dec 01 06:56:07 crc kubenswrapper[4632]: I1201 06:56:07.916668 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Dec 01 06:56:08 crc kubenswrapper[4632]: I1201 06:56:08.088734 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 01 06:56:08 crc kubenswrapper[4632]: W1201 06:56:08.107972 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd73ee8ba_1384_40b9_bbe8_62425cd044db.slice/crio-70088395c82781e087499bad284b4f8c5d0f90aeee993329907333f23a585319 WatchSource:0}: Error finding container 70088395c82781e087499bad284b4f8c5d0f90aeee993329907333f23a585319: Status 404 returned error can't find the container with id 70088395c82781e087499bad284b4f8c5d0f90aeee993329907333f23a585319 Dec 01 06:56:08 crc kubenswrapper[4632]: I1201 06:56:08.359882 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 01 06:56:08 crc kubenswrapper[4632]: I1201 06:56:08.867274 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"d73ee8ba-1384-40b9-bbe8-62425cd044db","Type":"ContainerStarted","Data":"70088395c82781e087499bad284b4f8c5d0f90aeee993329907333f23a585319"} Dec 01 06:56:08 crc kubenswrapper[4632]: I1201 06:56:08.878287 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"df701889-0ecf-4452-8689-40cc4c4de347","Type":"ContainerStarted","Data":"3ef79dd1ff348cc8b1f635f6fb4ef7bdca9cf4cef8ad9350d0f32e44743bdf2c"} Dec 01 06:56:09 crc kubenswrapper[4632]: I1201 06:56:09.282232 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 06:56:09 crc kubenswrapper[4632]: I1201 06:56:09.288273 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 01 06:56:09 crc kubenswrapper[4632]: I1201 06:56:09.292443 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-bdhbs" Dec 01 06:56:09 crc kubenswrapper[4632]: I1201 06:56:09.295620 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 06:56:09 crc kubenswrapper[4632]: I1201 06:56:09.339754 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhgj8\" (UniqueName: \"kubernetes.io/projected/5f0d2218-83ab-431d-a2d3-e7d54237abff-kube-api-access-mhgj8\") pod \"kube-state-metrics-0\" (UID: \"5f0d2218-83ab-431d-a2d3-e7d54237abff\") " pod="openstack/kube-state-metrics-0" Dec 01 06:56:09 crc kubenswrapper[4632]: I1201 06:56:09.441552 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhgj8\" (UniqueName: \"kubernetes.io/projected/5f0d2218-83ab-431d-a2d3-e7d54237abff-kube-api-access-mhgj8\") pod \"kube-state-metrics-0\" (UID: \"5f0d2218-83ab-431d-a2d3-e7d54237abff\") " pod="openstack/kube-state-metrics-0" Dec 01 06:56:09 crc kubenswrapper[4632]: I1201 06:56:09.460935 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhgj8\" (UniqueName: \"kubernetes.io/projected/5f0d2218-83ab-431d-a2d3-e7d54237abff-kube-api-access-mhgj8\") pod \"kube-state-metrics-0\" (UID: \"5f0d2218-83ab-431d-a2d3-e7d54237abff\") " pod="openstack/kube-state-metrics-0" Dec 01 06:56:09 crc kubenswrapper[4632]: I1201 06:56:09.630663 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 01 06:56:10 crc kubenswrapper[4632]: I1201 06:56:10.055853 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 06:56:10 crc kubenswrapper[4632]: W1201 06:56:10.063304 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f0d2218_83ab_431d_a2d3_e7d54237abff.slice/crio-80c67f9f39894396310f7f3b053174f7eb310a0c69d89051ab4b39350fa61772 WatchSource:0}: Error finding container 80c67f9f39894396310f7f3b053174f7eb310a0c69d89051ab4b39350fa61772: Status 404 returned error can't find the container with id 80c67f9f39894396310f7f3b053174f7eb310a0c69d89051ab4b39350fa61772 Dec 01 06:56:10 crc kubenswrapper[4632]: I1201 06:56:10.943628 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5f0d2218-83ab-431d-a2d3-e7d54237abff","Type":"ContainerStarted","Data":"80c67f9f39894396310f7f3b053174f7eb310a0c69d89051ab4b39350fa61772"} Dec 01 06:56:12 crc kubenswrapper[4632]: I1201 06:56:12.970777 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5f0d2218-83ab-431d-a2d3-e7d54237abff","Type":"ContainerStarted","Data":"d08dd6514daff3c06f8352953a381beacfd2d09d94862de30c588a98b860c708"} Dec 01 06:56:12 crc kubenswrapper[4632]: I1201 06:56:12.972808 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 01 06:56:12 crc kubenswrapper[4632]: I1201 06:56:12.991147 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.543536564 podStartE2EDuration="3.991125863s" podCreationTimestamp="2025-12-01 06:56:09 +0000 UTC" firstStartedPulling="2025-12-01 06:56:10.067556686 +0000 UTC m=+779.632569649" lastFinishedPulling="2025-12-01 06:56:12.515145975 +0000 UTC m=+782.080158948" observedRunningTime="2025-12-01 06:56:12.986782648 +0000 UTC m=+782.551795621" watchObservedRunningTime="2025-12-01 06:56:12.991125863 +0000 UTC m=+782.556138835" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.790824 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-hw5x4"] Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.792048 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-hw5x4" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.796654 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.796721 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-clfdc" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.796900 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.828280 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-hw5x4"] Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.842576 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe332539-435d-44e0-bcf5-c47332ed1e55-ovn-controller-tls-certs\") pod \"ovn-controller-hw5x4\" (UID: \"fe332539-435d-44e0-bcf5-c47332ed1e55\") " pod="openstack/ovn-controller-hw5x4" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.842631 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fe332539-435d-44e0-bcf5-c47332ed1e55-var-log-ovn\") pod \"ovn-controller-hw5x4\" (UID: \"fe332539-435d-44e0-bcf5-c47332ed1e55\") " pod="openstack/ovn-controller-hw5x4" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.842655 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe332539-435d-44e0-bcf5-c47332ed1e55-combined-ca-bundle\") pod \"ovn-controller-hw5x4\" (UID: \"fe332539-435d-44e0-bcf5-c47332ed1e55\") " pod="openstack/ovn-controller-hw5x4" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.842757 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fe332539-435d-44e0-bcf5-c47332ed1e55-var-run\") pod \"ovn-controller-hw5x4\" (UID: \"fe332539-435d-44e0-bcf5-c47332ed1e55\") " pod="openstack/ovn-controller-hw5x4" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.842782 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fe332539-435d-44e0-bcf5-c47332ed1e55-var-run-ovn\") pod \"ovn-controller-hw5x4\" (UID: \"fe332539-435d-44e0-bcf5-c47332ed1e55\") " pod="openstack/ovn-controller-hw5x4" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.842849 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fe332539-435d-44e0-bcf5-c47332ed1e55-scripts\") pod \"ovn-controller-hw5x4\" (UID: \"fe332539-435d-44e0-bcf5-c47332ed1e55\") " pod="openstack/ovn-controller-hw5x4" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.842868 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djdw2\" (UniqueName: \"kubernetes.io/projected/fe332539-435d-44e0-bcf5-c47332ed1e55-kube-api-access-djdw2\") pod \"ovn-controller-hw5x4\" (UID: \"fe332539-435d-44e0-bcf5-c47332ed1e55\") " pod="openstack/ovn-controller-hw5x4" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.844033 4632 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-rx6vw"] Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.846069 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-rx6vw" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.850486 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-rx6vw"] Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.944022 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe332539-435d-44e0-bcf5-c47332ed1e55-combined-ca-bundle\") pod \"ovn-controller-hw5x4\" (UID: \"fe332539-435d-44e0-bcf5-c47332ed1e55\") " pod="openstack/ovn-controller-hw5x4" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.944060 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fe332539-435d-44e0-bcf5-c47332ed1e55-var-log-ovn\") pod \"ovn-controller-hw5x4\" (UID: \"fe332539-435d-44e0-bcf5-c47332ed1e55\") " pod="openstack/ovn-controller-hw5x4" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.944564 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fe332539-435d-44e0-bcf5-c47332ed1e55-var-run\") pod \"ovn-controller-hw5x4\" (UID: \"fe332539-435d-44e0-bcf5-c47332ed1e55\") " pod="openstack/ovn-controller-hw5x4" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.944606 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/e22d1e7d-958f-4e02-911b-31f513dd9802-var-lib\") pod \"ovn-controller-ovs-rx6vw\" (UID: \"e22d1e7d-958f-4e02-911b-31f513dd9802\") " pod="openstack/ovn-controller-ovs-rx6vw" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.944625 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fe332539-435d-44e0-bcf5-c47332ed1e55-var-run-ovn\") pod \"ovn-controller-hw5x4\" (UID: \"fe332539-435d-44e0-bcf5-c47332ed1e55\") " pod="openstack/ovn-controller-hw5x4" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.944646 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/e22d1e7d-958f-4e02-911b-31f513dd9802-var-log\") pod \"ovn-controller-ovs-rx6vw\" (UID: \"e22d1e7d-958f-4e02-911b-31f513dd9802\") " pod="openstack/ovn-controller-ovs-rx6vw" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.944709 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6blfq\" (UniqueName: \"kubernetes.io/projected/e22d1e7d-958f-4e02-911b-31f513dd9802-kube-api-access-6blfq\") pod \"ovn-controller-ovs-rx6vw\" (UID: \"e22d1e7d-958f-4e02-911b-31f513dd9802\") " pod="openstack/ovn-controller-ovs-rx6vw" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.944711 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/fe332539-435d-44e0-bcf5-c47332ed1e55-var-log-ovn\") pod \"ovn-controller-hw5x4\" (UID: \"fe332539-435d-44e0-bcf5-c47332ed1e55\") " pod="openstack/ovn-controller-hw5x4" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.944805 4632 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e22d1e7d-958f-4e02-911b-31f513dd9802-var-run\") pod \"ovn-controller-ovs-rx6vw\" (UID: \"e22d1e7d-958f-4e02-911b-31f513dd9802\") " pod="openstack/ovn-controller-ovs-rx6vw" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.944823 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/fe332539-435d-44e0-bcf5-c47332ed1e55-var-run\") pod \"ovn-controller-hw5x4\" (UID: \"fe332539-435d-44e0-bcf5-c47332ed1e55\") " pod="openstack/ovn-controller-hw5x4" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.944841 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fe332539-435d-44e0-bcf5-c47332ed1e55-scripts\") pod \"ovn-controller-hw5x4\" (UID: \"fe332539-435d-44e0-bcf5-c47332ed1e55\") " pod="openstack/ovn-controller-hw5x4" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.944916 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e22d1e7d-958f-4e02-911b-31f513dd9802-scripts\") pod \"ovn-controller-ovs-rx6vw\" (UID: \"e22d1e7d-958f-4e02-911b-31f513dd9802\") " pod="openstack/ovn-controller-ovs-rx6vw" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.944946 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djdw2\" (UniqueName: \"kubernetes.io/projected/fe332539-435d-44e0-bcf5-c47332ed1e55-kube-api-access-djdw2\") pod \"ovn-controller-hw5x4\" (UID: \"fe332539-435d-44e0-bcf5-c47332ed1e55\") " pod="openstack/ovn-controller-hw5x4" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.944953 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/fe332539-435d-44e0-bcf5-c47332ed1e55-var-run-ovn\") pod \"ovn-controller-hw5x4\" (UID: \"fe332539-435d-44e0-bcf5-c47332ed1e55\") " pod="openstack/ovn-controller-hw5x4" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.945085 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe332539-435d-44e0-bcf5-c47332ed1e55-ovn-controller-tls-certs\") pod \"ovn-controller-hw5x4\" (UID: \"fe332539-435d-44e0-bcf5-c47332ed1e55\") " pod="openstack/ovn-controller-hw5x4" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.945130 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/e22d1e7d-958f-4e02-911b-31f513dd9802-etc-ovs\") pod \"ovn-controller-ovs-rx6vw\" (UID: \"e22d1e7d-958f-4e02-911b-31f513dd9802\") " pod="openstack/ovn-controller-ovs-rx6vw" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.952099 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fe332539-435d-44e0-bcf5-c47332ed1e55-scripts\") pod \"ovn-controller-hw5x4\" (UID: \"fe332539-435d-44e0-bcf5-c47332ed1e55\") " pod="openstack/ovn-controller-hw5x4" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.953663 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe332539-435d-44e0-bcf5-c47332ed1e55-ovn-controller-tls-certs\") pod 
\"ovn-controller-hw5x4\" (UID: \"fe332539-435d-44e0-bcf5-c47332ed1e55\") " pod="openstack/ovn-controller-hw5x4" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.953833 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe332539-435d-44e0-bcf5-c47332ed1e55-combined-ca-bundle\") pod \"ovn-controller-hw5x4\" (UID: \"fe332539-435d-44e0-bcf5-c47332ed1e55\") " pod="openstack/ovn-controller-hw5x4" Dec 01 06:56:13 crc kubenswrapper[4632]: I1201 06:56:13.959274 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djdw2\" (UniqueName: \"kubernetes.io/projected/fe332539-435d-44e0-bcf5-c47332ed1e55-kube-api-access-djdw2\") pod \"ovn-controller-hw5x4\" (UID: \"fe332539-435d-44e0-bcf5-c47332ed1e55\") " pod="openstack/ovn-controller-hw5x4" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.045986 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6blfq\" (UniqueName: \"kubernetes.io/projected/e22d1e7d-958f-4e02-911b-31f513dd9802-kube-api-access-6blfq\") pod \"ovn-controller-ovs-rx6vw\" (UID: \"e22d1e7d-958f-4e02-911b-31f513dd9802\") " pod="openstack/ovn-controller-ovs-rx6vw" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.046058 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e22d1e7d-958f-4e02-911b-31f513dd9802-var-run\") pod \"ovn-controller-ovs-rx6vw\" (UID: \"e22d1e7d-958f-4e02-911b-31f513dd9802\") " pod="openstack/ovn-controller-ovs-rx6vw" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.046088 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e22d1e7d-958f-4e02-911b-31f513dd9802-scripts\") pod \"ovn-controller-ovs-rx6vw\" (UID: \"e22d1e7d-958f-4e02-911b-31f513dd9802\") " pod="openstack/ovn-controller-ovs-rx6vw" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.046120 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/e22d1e7d-958f-4e02-911b-31f513dd9802-etc-ovs\") pod \"ovn-controller-ovs-rx6vw\" (UID: \"e22d1e7d-958f-4e02-911b-31f513dd9802\") " pod="openstack/ovn-controller-ovs-rx6vw" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.046206 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/e22d1e7d-958f-4e02-911b-31f513dd9802-var-lib\") pod \"ovn-controller-ovs-rx6vw\" (UID: \"e22d1e7d-958f-4e02-911b-31f513dd9802\") " pod="openstack/ovn-controller-ovs-rx6vw" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.046222 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/e22d1e7d-958f-4e02-911b-31f513dd9802-var-log\") pod \"ovn-controller-ovs-rx6vw\" (UID: \"e22d1e7d-958f-4e02-911b-31f513dd9802\") " pod="openstack/ovn-controller-ovs-rx6vw" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.046396 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/e22d1e7d-958f-4e02-911b-31f513dd9802-var-log\") pod \"ovn-controller-ovs-rx6vw\" (UID: \"e22d1e7d-958f-4e02-911b-31f513dd9802\") " pod="openstack/ovn-controller-ovs-rx6vw" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.047002 4632 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e22d1e7d-958f-4e02-911b-31f513dd9802-var-run\") pod \"ovn-controller-ovs-rx6vw\" (UID: \"e22d1e7d-958f-4e02-911b-31f513dd9802\") " pod="openstack/ovn-controller-ovs-rx6vw" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.047697 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/e22d1e7d-958f-4e02-911b-31f513dd9802-etc-ovs\") pod \"ovn-controller-ovs-rx6vw\" (UID: \"e22d1e7d-958f-4e02-911b-31f513dd9802\") " pod="openstack/ovn-controller-ovs-rx6vw" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.047857 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/e22d1e7d-958f-4e02-911b-31f513dd9802-var-lib\") pod \"ovn-controller-ovs-rx6vw\" (UID: \"e22d1e7d-958f-4e02-911b-31f513dd9802\") " pod="openstack/ovn-controller-ovs-rx6vw" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.049062 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e22d1e7d-958f-4e02-911b-31f513dd9802-scripts\") pod \"ovn-controller-ovs-rx6vw\" (UID: \"e22d1e7d-958f-4e02-911b-31f513dd9802\") " pod="openstack/ovn-controller-ovs-rx6vw" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.061593 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6blfq\" (UniqueName: \"kubernetes.io/projected/e22d1e7d-958f-4e02-911b-31f513dd9802-kube-api-access-6blfq\") pod \"ovn-controller-ovs-rx6vw\" (UID: \"e22d1e7d-958f-4e02-911b-31f513dd9802\") " pod="openstack/ovn-controller-ovs-rx6vw" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.118639 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-hw5x4" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.171760 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-rx6vw" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.602191 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-hw5x4"] Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.692809 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.694502 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.696615 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.696778 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.696874 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.697061 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.697062 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-rkhh8" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.698890 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.747323 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-rx6vw"] Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.867433 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5f52c6d-0a10-43a9-84f8-940c156f3278-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c5f52c6d-0a10-43a9-84f8-940c156f3278\") " pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.867542 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qls57\" (UniqueName: \"kubernetes.io/projected/c5f52c6d-0a10-43a9-84f8-940c156f3278-kube-api-access-qls57\") pod \"ovsdbserver-sb-0\" (UID: \"c5f52c6d-0a10-43a9-84f8-940c156f3278\") " pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.867628 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5f52c6d-0a10-43a9-84f8-940c156f3278-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c5f52c6d-0a10-43a9-84f8-940c156f3278\") " pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.867692 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5f52c6d-0a10-43a9-84f8-940c156f3278-config\") pod \"ovsdbserver-sb-0\" (UID: \"c5f52c6d-0a10-43a9-84f8-940c156f3278\") " pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.867774 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"c5f52c6d-0a10-43a9-84f8-940c156f3278\") " pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.867809 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c5f52c6d-0a10-43a9-84f8-940c156f3278-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"c5f52c6d-0a10-43a9-84f8-940c156f3278\") " pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:14 crc 
kubenswrapper[4632]: I1201 06:56:14.867978 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c5f52c6d-0a10-43a9-84f8-940c156f3278-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"c5f52c6d-0a10-43a9-84f8-940c156f3278\") " pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.868107 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5f52c6d-0a10-43a9-84f8-940c156f3278-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"c5f52c6d-0a10-43a9-84f8-940c156f3278\") " pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.969588 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5f52c6d-0a10-43a9-84f8-940c156f3278-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"c5f52c6d-0a10-43a9-84f8-940c156f3278\") " pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.969648 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5f52c6d-0a10-43a9-84f8-940c156f3278-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c5f52c6d-0a10-43a9-84f8-940c156f3278\") " pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.969681 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qls57\" (UniqueName: \"kubernetes.io/projected/c5f52c6d-0a10-43a9-84f8-940c156f3278-kube-api-access-qls57\") pod \"ovsdbserver-sb-0\" (UID: \"c5f52c6d-0a10-43a9-84f8-940c156f3278\") " pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.969701 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5f52c6d-0a10-43a9-84f8-940c156f3278-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c5f52c6d-0a10-43a9-84f8-940c156f3278\") " pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.969743 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5f52c6d-0a10-43a9-84f8-940c156f3278-config\") pod \"ovsdbserver-sb-0\" (UID: \"c5f52c6d-0a10-43a9-84f8-940c156f3278\") " pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.969774 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"c5f52c6d-0a10-43a9-84f8-940c156f3278\") " pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.969791 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c5f52c6d-0a10-43a9-84f8-940c156f3278-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"c5f52c6d-0a10-43a9-84f8-940c156f3278\") " pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.969829 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c5f52c6d-0a10-43a9-84f8-940c156f3278-ovsdb-rundir\") 
pod \"ovsdbserver-sb-0\" (UID: \"c5f52c6d-0a10-43a9-84f8-940c156f3278\") " pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.970267 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c5f52c6d-0a10-43a9-84f8-940c156f3278-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"c5f52c6d-0a10-43a9-84f8-940c156f3278\") " pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.970699 4632 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"c5f52c6d-0a10-43a9-84f8-940c156f3278\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.971789 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c5f52c6d-0a10-43a9-84f8-940c156f3278-config\") pod \"ovsdbserver-sb-0\" (UID: \"c5f52c6d-0a10-43a9-84f8-940c156f3278\") " pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.972633 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c5f52c6d-0a10-43a9-84f8-940c156f3278-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"c5f52c6d-0a10-43a9-84f8-940c156f3278\") " pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.976235 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5f52c6d-0a10-43a9-84f8-940c156f3278-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c5f52c6d-0a10-43a9-84f8-940c156f3278\") " pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.976621 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5f52c6d-0a10-43a9-84f8-940c156f3278-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"c5f52c6d-0a10-43a9-84f8-940c156f3278\") " pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:14 crc kubenswrapper[4632]: I1201 06:56:14.990195 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c5f52c6d-0a10-43a9-84f8-940c156f3278-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c5f52c6d-0a10-43a9-84f8-940c156f3278\") " pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.000647 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qls57\" (UniqueName: \"kubernetes.io/projected/c5f52c6d-0a10-43a9-84f8-940c156f3278-kube-api-access-qls57\") pod \"ovsdbserver-sb-0\" (UID: \"c5f52c6d-0a10-43a9-84f8-940c156f3278\") " pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.017960 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-rx6vw" event={"ID":"e22d1e7d-958f-4e02-911b-31f513dd9802","Type":"ContainerStarted","Data":"c86061540d5ba9a283096594f1b7173701c12d5d829693f7dcf5c2aa225b5d32"} Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.022331 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hw5x4" 
event={"ID":"fe332539-435d-44e0-bcf5-c47332ed1e55","Type":"ContainerStarted","Data":"985c9758a3036d07ee96bf6ad280940dfac39a8cab16ae7cedd17fe4b5d2fc6f"} Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.024123 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"c5f52c6d-0a10-43a9-84f8-940c156f3278\") " pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.213707 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-qkjhz"] Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.214818 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-qkjhz" Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.217007 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.234590 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-qkjhz"] Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.324043 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.383600 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/e2d9748c-3d24-43dd-a125-3a20cfe296e2-ovn-rundir\") pod \"ovn-controller-metrics-qkjhz\" (UID: \"e2d9748c-3d24-43dd-a125-3a20cfe296e2\") " pod="openstack/ovn-controller-metrics-qkjhz" Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.384458 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pkc87\" (UniqueName: \"kubernetes.io/projected/e2d9748c-3d24-43dd-a125-3a20cfe296e2-kube-api-access-pkc87\") pod \"ovn-controller-metrics-qkjhz\" (UID: \"e2d9748c-3d24-43dd-a125-3a20cfe296e2\") " pod="openstack/ovn-controller-metrics-qkjhz" Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.384533 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2d9748c-3d24-43dd-a125-3a20cfe296e2-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-qkjhz\" (UID: \"e2d9748c-3d24-43dd-a125-3a20cfe296e2\") " pod="openstack/ovn-controller-metrics-qkjhz" Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.384573 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2d9748c-3d24-43dd-a125-3a20cfe296e2-combined-ca-bundle\") pod \"ovn-controller-metrics-qkjhz\" (UID: \"e2d9748c-3d24-43dd-a125-3a20cfe296e2\") " pod="openstack/ovn-controller-metrics-qkjhz" Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.385178 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/e2d9748c-3d24-43dd-a125-3a20cfe296e2-ovs-rundir\") pod \"ovn-controller-metrics-qkjhz\" (UID: \"e2d9748c-3d24-43dd-a125-3a20cfe296e2\") " pod="openstack/ovn-controller-metrics-qkjhz" Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.385711 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2d9748c-3d24-43dd-a125-3a20cfe296e2-config\") pod \"ovn-controller-metrics-qkjhz\" (UID: \"e2d9748c-3d24-43dd-a125-3a20cfe296e2\") " pod="openstack/ovn-controller-metrics-qkjhz" Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.488986 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pkc87\" (UniqueName: \"kubernetes.io/projected/e2d9748c-3d24-43dd-a125-3a20cfe296e2-kube-api-access-pkc87\") pod \"ovn-controller-metrics-qkjhz\" (UID: \"e2d9748c-3d24-43dd-a125-3a20cfe296e2\") " pod="openstack/ovn-controller-metrics-qkjhz" Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.489042 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2d9748c-3d24-43dd-a125-3a20cfe296e2-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-qkjhz\" (UID: \"e2d9748c-3d24-43dd-a125-3a20cfe296e2\") " pod="openstack/ovn-controller-metrics-qkjhz" Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.489079 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2d9748c-3d24-43dd-a125-3a20cfe296e2-combined-ca-bundle\") pod \"ovn-controller-metrics-qkjhz\" (UID: \"e2d9748c-3d24-43dd-a125-3a20cfe296e2\") " pod="openstack/ovn-controller-metrics-qkjhz" Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.489112 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/e2d9748c-3d24-43dd-a125-3a20cfe296e2-ovs-rundir\") pod \"ovn-controller-metrics-qkjhz\" (UID: \"e2d9748c-3d24-43dd-a125-3a20cfe296e2\") " pod="openstack/ovn-controller-metrics-qkjhz" Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.489132 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2d9748c-3d24-43dd-a125-3a20cfe296e2-config\") pod \"ovn-controller-metrics-qkjhz\" (UID: \"e2d9748c-3d24-43dd-a125-3a20cfe296e2\") " pod="openstack/ovn-controller-metrics-qkjhz" Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.489202 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/e2d9748c-3d24-43dd-a125-3a20cfe296e2-ovn-rundir\") pod \"ovn-controller-metrics-qkjhz\" (UID: \"e2d9748c-3d24-43dd-a125-3a20cfe296e2\") " pod="openstack/ovn-controller-metrics-qkjhz" Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.489456 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/e2d9748c-3d24-43dd-a125-3a20cfe296e2-ovn-rundir\") pod \"ovn-controller-metrics-qkjhz\" (UID: \"e2d9748c-3d24-43dd-a125-3a20cfe296e2\") " pod="openstack/ovn-controller-metrics-qkjhz" Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.489466 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/e2d9748c-3d24-43dd-a125-3a20cfe296e2-ovs-rundir\") pod \"ovn-controller-metrics-qkjhz\" (UID: \"e2d9748c-3d24-43dd-a125-3a20cfe296e2\") " pod="openstack/ovn-controller-metrics-qkjhz" Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.491526 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e2d9748c-3d24-43dd-a125-3a20cfe296e2-config\") 
pod \"ovn-controller-metrics-qkjhz\" (UID: \"e2d9748c-3d24-43dd-a125-3a20cfe296e2\") " pod="openstack/ovn-controller-metrics-qkjhz" Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.494502 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2d9748c-3d24-43dd-a125-3a20cfe296e2-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-qkjhz\" (UID: \"e2d9748c-3d24-43dd-a125-3a20cfe296e2\") " pod="openstack/ovn-controller-metrics-qkjhz" Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.494698 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2d9748c-3d24-43dd-a125-3a20cfe296e2-combined-ca-bundle\") pod \"ovn-controller-metrics-qkjhz\" (UID: \"e2d9748c-3d24-43dd-a125-3a20cfe296e2\") " pod="openstack/ovn-controller-metrics-qkjhz" Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.502533 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pkc87\" (UniqueName: \"kubernetes.io/projected/e2d9748c-3d24-43dd-a125-3a20cfe296e2-kube-api-access-pkc87\") pod \"ovn-controller-metrics-qkjhz\" (UID: \"e2d9748c-3d24-43dd-a125-3a20cfe296e2\") " pod="openstack/ovn-controller-metrics-qkjhz" Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.534387 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-qkjhz" Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.845765 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 01 06:56:15 crc kubenswrapper[4632]: I1201 06:56:15.956299 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-qkjhz"] Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.336242 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.344896 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.344985 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.346797 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.346970 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.346992 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-svwh4" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.352727 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.511607 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0bb9103-afbb-45ea-9427-c4925dd007c9-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f0bb9103-afbb-45ea-9427-c4925dd007c9\") " pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.511902 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0bb9103-afbb-45ea-9427-c4925dd007c9-config\") pod \"ovsdbserver-nb-0\" (UID: \"f0bb9103-afbb-45ea-9427-c4925dd007c9\") " pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.511974 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f0bb9103-afbb-45ea-9427-c4925dd007c9-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"f0bb9103-afbb-45ea-9427-c4925dd007c9\") " pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.512061 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f0bb9103-afbb-45ea-9427-c4925dd007c9-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"f0bb9103-afbb-45ea-9427-c4925dd007c9\") " pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.512173 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0bb9103-afbb-45ea-9427-c4925dd007c9-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f0bb9103-afbb-45ea-9427-c4925dd007c9\") " pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.512211 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rbtx\" (UniqueName: \"kubernetes.io/projected/f0bb9103-afbb-45ea-9427-c4925dd007c9-kube-api-access-7rbtx\") pod \"ovsdbserver-nb-0\" (UID: \"f0bb9103-afbb-45ea-9427-c4925dd007c9\") " pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.512256 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0bb9103-afbb-45ea-9427-c4925dd007c9-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: 
\"f0bb9103-afbb-45ea-9427-c4925dd007c9\") " pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.512288 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"f0bb9103-afbb-45ea-9427-c4925dd007c9\") " pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.615460 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0bb9103-afbb-45ea-9427-c4925dd007c9-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f0bb9103-afbb-45ea-9427-c4925dd007c9\") " pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.615533 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rbtx\" (UniqueName: \"kubernetes.io/projected/f0bb9103-afbb-45ea-9427-c4925dd007c9-kube-api-access-7rbtx\") pod \"ovsdbserver-nb-0\" (UID: \"f0bb9103-afbb-45ea-9427-c4925dd007c9\") " pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.615570 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0bb9103-afbb-45ea-9427-c4925dd007c9-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"f0bb9103-afbb-45ea-9427-c4925dd007c9\") " pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.615590 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"f0bb9103-afbb-45ea-9427-c4925dd007c9\") " pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.615614 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0bb9103-afbb-45ea-9427-c4925dd007c9-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f0bb9103-afbb-45ea-9427-c4925dd007c9\") " pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.615632 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0bb9103-afbb-45ea-9427-c4925dd007c9-config\") pod \"ovsdbserver-nb-0\" (UID: \"f0bb9103-afbb-45ea-9427-c4925dd007c9\") " pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.615667 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f0bb9103-afbb-45ea-9427-c4925dd007c9-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"f0bb9103-afbb-45ea-9427-c4925dd007c9\") " pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.615701 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f0bb9103-afbb-45ea-9427-c4925dd007c9-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"f0bb9103-afbb-45ea-9427-c4925dd007c9\") " pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.616042 4632 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"f0bb9103-afbb-45ea-9427-c4925dd007c9\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.616169 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f0bb9103-afbb-45ea-9427-c4925dd007c9-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"f0bb9103-afbb-45ea-9427-c4925dd007c9\") " pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.616630 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f0bb9103-afbb-45ea-9427-c4925dd007c9-config\") pod \"ovsdbserver-nb-0\" (UID: \"f0bb9103-afbb-45ea-9427-c4925dd007c9\") " pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.617021 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f0bb9103-afbb-45ea-9427-c4925dd007c9-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"f0bb9103-afbb-45ea-9427-c4925dd007c9\") " pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.623792 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0bb9103-afbb-45ea-9427-c4925dd007c9-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"f0bb9103-afbb-45ea-9427-c4925dd007c9\") " pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.625426 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0bb9103-afbb-45ea-9427-c4925dd007c9-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f0bb9103-afbb-45ea-9427-c4925dd007c9\") " pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.628098 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0bb9103-afbb-45ea-9427-c4925dd007c9-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f0bb9103-afbb-45ea-9427-c4925dd007c9\") " pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.631895 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7rbtx\" (UniqueName: \"kubernetes.io/projected/f0bb9103-afbb-45ea-9427-c4925dd007c9-kube-api-access-7rbtx\") pod \"ovsdbserver-nb-0\" (UID: \"f0bb9103-afbb-45ea-9427-c4925dd007c9\") " pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.652106 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"f0bb9103-afbb-45ea-9427-c4925dd007c9\") " pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:16 crc kubenswrapper[4632]: I1201 06:56:16.672048 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:19 crc kubenswrapper[4632]: I1201 06:56:19.639077 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 01 06:56:20 crc kubenswrapper[4632]: W1201 06:56:20.152699 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc5f52c6d_0a10_43a9_84f8_940c156f3278.slice/crio-568fe4ca29b0a01e2d4945f34227539e3f7df87fbb1ead9edaf62fc270ce2502 WatchSource:0}: Error finding container 568fe4ca29b0a01e2d4945f34227539e3f7df87fbb1ead9edaf62fc270ce2502: Status 404 returned error can't find the container with id 568fe4ca29b0a01e2d4945f34227539e3f7df87fbb1ead9edaf62fc270ce2502 Dec 01 06:56:20 crc kubenswrapper[4632]: W1201 06:56:20.164041 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode2d9748c_3d24_43dd_a125_3a20cfe296e2.slice/crio-7b12881adaacfcfb850015a4f2e987b00ac01f847d20956dc9241312dbe3eb84 WatchSource:0}: Error finding container 7b12881adaacfcfb850015a4f2e987b00ac01f847d20956dc9241312dbe3eb84: Status 404 returned error can't find the container with id 7b12881adaacfcfb850015a4f2e987b00ac01f847d20956dc9241312dbe3eb84 Dec 01 06:56:21 crc kubenswrapper[4632]: I1201 06:56:21.067485 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-qkjhz" event={"ID":"e2d9748c-3d24-43dd-a125-3a20cfe296e2","Type":"ContainerStarted","Data":"7b12881adaacfcfb850015a4f2e987b00ac01f847d20956dc9241312dbe3eb84"} Dec 01 06:56:21 crc kubenswrapper[4632]: I1201 06:56:21.068339 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"c5f52c6d-0a10-43a9-84f8-940c156f3278","Type":"ContainerStarted","Data":"568fe4ca29b0a01e2d4945f34227539e3f7df87fbb1ead9edaf62fc270ce2502"} Dec 01 06:56:28 crc kubenswrapper[4632]: E1201 06:56:28.257733 4632 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-neutron-server:fa2bb8efef6782c26ea7f1675eeb36dd" Dec 01 06:56:28 crc kubenswrapper[4632]: E1201 06:56:28.258340 4632 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-neutron-server:fa2bb8efef6782c26ea7f1675eeb36dd" Dec 01 06:56:28 crc kubenswrapper[4632]: E1201 06:56:28.258501 4632 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-neutron-server:fa2bb8efef6782c26ea7f1675eeb36dd,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5dlgd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-c8f8bcb7c-sx4lh_openstack(659fe06a-35e7-425e-ac75-e4e1169cc7bf): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 06:56:28 crc kubenswrapper[4632]: E1201 06:56:28.259738 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-c8f8bcb7c-sx4lh" podUID="659fe06a-35e7-425e-ac75-e4e1169cc7bf" Dec 01 06:56:28 crc kubenswrapper[4632]: E1201 06:56:28.280755 4632 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-neutron-server:fa2bb8efef6782c26ea7f1675eeb36dd" Dec 01 06:56:28 crc kubenswrapper[4632]: E1201 06:56:28.280834 4632 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-neutron-server:fa2bb8efef6782c26ea7f1675eeb36dd" Dec 01 06:56:28 crc kubenswrapper[4632]: E1201 06:56:28.280981 4632 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-neutron-server:fa2bb8efef6782c26ea7f1675eeb36dd,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zpzqm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-d4c79f9f7-g2t4b_openstack(093dff7e-f87e-48a1-93b2-1b8af26cbc15): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 06:56:28 crc kubenswrapper[4632]: E1201 06:56:28.282194 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-d4c79f9f7-g2t4b" podUID="093dff7e-f87e-48a1-93b2-1b8af26cbc15" Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.105344 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c8f8bcb7c-sx4lh" Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.112085 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-d4c79f9f7-g2t4b" Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.153406 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-c8f8bcb7c-sx4lh" event={"ID":"659fe06a-35e7-425e-ac75-e4e1169cc7bf","Type":"ContainerDied","Data":"f76c1aebd453cb7bfd01df2ea04538c956272c98a20c84f9ea16b84cf13c37f5"} Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.153496 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-c8f8bcb7c-sx4lh" Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.155180 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-d4c79f9f7-g2t4b" event={"ID":"093dff7e-f87e-48a1-93b2-1b8af26cbc15","Type":"ContainerDied","Data":"47c722d3f933384608a3a5df06545fda6be3567a772f5c55e167b79f37bd0b85"} Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.155234 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-d4c79f9f7-g2t4b" Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.191523 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/659fe06a-35e7-425e-ac75-e4e1169cc7bf-dns-svc\") pod \"659fe06a-35e7-425e-ac75-e4e1169cc7bf\" (UID: \"659fe06a-35e7-425e-ac75-e4e1169cc7bf\") " Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.191562 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/093dff7e-f87e-48a1-93b2-1b8af26cbc15-config\") pod \"093dff7e-f87e-48a1-93b2-1b8af26cbc15\" (UID: \"093dff7e-f87e-48a1-93b2-1b8af26cbc15\") " Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.191609 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zpzqm\" (UniqueName: \"kubernetes.io/projected/093dff7e-f87e-48a1-93b2-1b8af26cbc15-kube-api-access-zpzqm\") pod \"093dff7e-f87e-48a1-93b2-1b8af26cbc15\" (UID: \"093dff7e-f87e-48a1-93b2-1b8af26cbc15\") " Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.191679 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5dlgd\" (UniqueName: \"kubernetes.io/projected/659fe06a-35e7-425e-ac75-e4e1169cc7bf-kube-api-access-5dlgd\") pod \"659fe06a-35e7-425e-ac75-e4e1169cc7bf\" (UID: \"659fe06a-35e7-425e-ac75-e4e1169cc7bf\") " Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.191781 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/659fe06a-35e7-425e-ac75-e4e1169cc7bf-config\") pod \"659fe06a-35e7-425e-ac75-e4e1169cc7bf\" (UID: \"659fe06a-35e7-425e-ac75-e4e1169cc7bf\") " Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.192160 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/659fe06a-35e7-425e-ac75-e4e1169cc7bf-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "659fe06a-35e7-425e-ac75-e4e1169cc7bf" (UID: "659fe06a-35e7-425e-ac75-e4e1169cc7bf"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.192422 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/093dff7e-f87e-48a1-93b2-1b8af26cbc15-config" (OuterVolumeSpecName: "config") pod "093dff7e-f87e-48a1-93b2-1b8af26cbc15" (UID: "093dff7e-f87e-48a1-93b2-1b8af26cbc15"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.192456 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/659fe06a-35e7-425e-ac75-e4e1169cc7bf-config" (OuterVolumeSpecName: "config") pod "659fe06a-35e7-425e-ac75-e4e1169cc7bf" (UID: "659fe06a-35e7-425e-ac75-e4e1169cc7bf"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.192576 4632 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/659fe06a-35e7-425e-ac75-e4e1169cc7bf-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.192593 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/093dff7e-f87e-48a1-93b2-1b8af26cbc15-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.197028 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/093dff7e-f87e-48a1-93b2-1b8af26cbc15-kube-api-access-zpzqm" (OuterVolumeSpecName: "kube-api-access-zpzqm") pod "093dff7e-f87e-48a1-93b2-1b8af26cbc15" (UID: "093dff7e-f87e-48a1-93b2-1b8af26cbc15"). InnerVolumeSpecName "kube-api-access-zpzqm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.198408 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/659fe06a-35e7-425e-ac75-e4e1169cc7bf-kube-api-access-5dlgd" (OuterVolumeSpecName: "kube-api-access-5dlgd") pod "659fe06a-35e7-425e-ac75-e4e1169cc7bf" (UID: "659fe06a-35e7-425e-ac75-e4e1169cc7bf"). InnerVolumeSpecName "kube-api-access-5dlgd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.295149 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/659fe06a-35e7-425e-ac75-e4e1169cc7bf-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.295181 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zpzqm\" (UniqueName: \"kubernetes.io/projected/093dff7e-f87e-48a1-93b2-1b8af26cbc15-kube-api-access-zpzqm\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.295195 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5dlgd\" (UniqueName: \"kubernetes.io/projected/659fe06a-35e7-425e-ac75-e4e1169cc7bf-kube-api-access-5dlgd\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.568742 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-c8f8bcb7c-sx4lh"] Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.576144 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-c8f8bcb7c-sx4lh"] Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.592899 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-d4c79f9f7-g2t4b"] Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.599621 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-d4c79f9f7-g2t4b"] Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.759894 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="093dff7e-f87e-48a1-93b2-1b8af26cbc15" path="/var/lib/kubelet/pods/093dff7e-f87e-48a1-93b2-1b8af26cbc15/volumes" Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.760343 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="659fe06a-35e7-425e-ac75-e4e1169cc7bf" path="/var/lib/kubelet/pods/659fe06a-35e7-425e-ac75-e4e1169cc7bf/volumes" Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.938063 4632 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 01 06:56:32 crc kubenswrapper[4632]: W1201 06:56:32.958886 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf0bb9103_afbb_45ea_9427_c4925dd007c9.slice/crio-7c5948f1477f3707369e6e827e5f3d9dae9f3ab4249a3fa532a2d66c2f63fa7e WatchSource:0}: Error finding container 7c5948f1477f3707369e6e827e5f3d9dae9f3ab4249a3fa532a2d66c2f63fa7e: Status 404 returned error can't find the container with id 7c5948f1477f3707369e6e827e5f3d9dae9f3ab4249a3fa532a2d66c2f63fa7e Dec 01 06:56:32 crc kubenswrapper[4632]: I1201 06:56:32.961963 4632 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 06:56:33 crc kubenswrapper[4632]: I1201 06:56:33.174698 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"f0bb9103-afbb-45ea-9427-c4925dd007c9","Type":"ContainerStarted","Data":"7c5948f1477f3707369e6e827e5f3d9dae9f3ab4249a3fa532a2d66c2f63fa7e"} Dec 01 06:56:33 crc kubenswrapper[4632]: I1201 06:56:33.178176 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"d73ee8ba-1384-40b9-bbe8-62425cd044db","Type":"ContainerStarted","Data":"bac35727270b6cf90b88cfbc1af1f43a67057c10a7a23903a8beef07e2aa4ad1"} Dec 01 06:56:34 crc kubenswrapper[4632]: I1201 06:56:34.189777 4632 generic.go:334] "Generic (PLEG): container finished" podID="e5fcf062-7331-4a42-8f87-cb6fba34de17" containerID="ffb41324d63f8996f0218b34166cbd0e21a63842529b71709079989cffcb5b66" exitCode=0 Dec 01 06:56:34 crc kubenswrapper[4632]: I1201 06:56:34.189871 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f75b6684c-v6km2" event={"ID":"e5fcf062-7331-4a42-8f87-cb6fba34de17","Type":"ContainerDied","Data":"ffb41324d63f8996f0218b34166cbd0e21a63842529b71709079989cffcb5b66"} Dec 01 06:56:34 crc kubenswrapper[4632]: I1201 06:56:34.194983 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"c5f52c6d-0a10-43a9-84f8-940c156f3278","Type":"ContainerStarted","Data":"330db2ec706e10346fe5be08fabf9cb7b12e2aa2e90d6694b2ada5cccef7d04b"} Dec 01 06:56:34 crc kubenswrapper[4632]: I1201 06:56:34.199863 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ff6e2f27-a2b5-4046-8e0e-dc495271a359","Type":"ContainerStarted","Data":"9dc2cfc06da8b761d9017a11b5369af0ede286129a1012ceeccb6b18a2e91a0d"} Dec 01 06:56:34 crc kubenswrapper[4632]: I1201 06:56:34.204961 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"ecd36c5b-96fc-49af-b8f2-634fcf854cfa","Type":"ContainerStarted","Data":"b3c5866325c129c491c9d80bbd9823e769c8f42607d5d8cb2580b3567669dfa9"} Dec 01 06:56:34 crc kubenswrapper[4632]: I1201 06:56:34.207102 4632 generic.go:334] "Generic (PLEG): container finished" podID="5f8f3547-1296-4a5b-b1a5-427ee1c1d763" containerID="3306559e4a2f4f1d7d41bf82f4d7b2a1482ca5d8656cb73d3310080bad3edafc" exitCode=0 Dec 01 06:56:34 crc kubenswrapper[4632]: I1201 06:56:34.207166 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-687bbf6d65-czr5n" event={"ID":"5f8f3547-1296-4a5b-b1a5-427ee1c1d763","Type":"ContainerDied","Data":"3306559e4a2f4f1d7d41bf82f4d7b2a1482ca5d8656cb73d3310080bad3edafc"} Dec 01 06:56:34 crc kubenswrapper[4632]: I1201 06:56:34.210268 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/memcached-0" event={"ID":"df701889-0ecf-4452-8689-40cc4c4de347","Type":"ContainerStarted","Data":"ff6e99225f2845b149e235e788d82e2357b754d7e243cd4b739cfb0f2509eebc"} Dec 01 06:56:34 crc kubenswrapper[4632]: I1201 06:56:34.210456 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 01 06:56:34 crc kubenswrapper[4632]: I1201 06:56:34.212879 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"67043517-303b-4159-a030-1192c39b98dd","Type":"ContainerStarted","Data":"9b9c7b2f6b6d2cb3fc2cb5af9ba4600c190b2153eb2f1f44d58f42a457c11540"} Dec 01 06:56:34 crc kubenswrapper[4632]: I1201 06:56:34.288761 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.74482158 podStartE2EDuration="27.288741955s" podCreationTimestamp="2025-12-01 06:56:07 +0000 UTC" firstStartedPulling="2025-12-01 06:56:08.36459795 +0000 UTC m=+777.929610923" lastFinishedPulling="2025-12-01 06:56:32.908518326 +0000 UTC m=+802.473531298" observedRunningTime="2025-12-01 06:56:34.285773483 +0000 UTC m=+803.850786457" watchObservedRunningTime="2025-12-01 06:56:34.288741955 +0000 UTC m=+803.853754928" Dec 01 06:56:34 crc kubenswrapper[4632]: E1201 06:56:34.765942 4632 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Dec 01 06:56:34 crc kubenswrapper[4632]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/e5fcf062-7331-4a42-8f87-cb6fba34de17/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 01 06:56:34 crc kubenswrapper[4632]: > podSandboxID="35a56697bf06ba4b8b0a48c6cfe6c5ed1bfc5d80b8cefe76562fc7e9a5109271" Dec 01 06:56:34 crc kubenswrapper[4632]: E1201 06:56:34.766294 4632 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 01 06:56:34 crc kubenswrapper[4632]: container &Container{Name:dnsmasq-dns,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-neutron-server:fa2bb8efef6782c26ea7f1675eeb36dd,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nfdh5dfhb6h64h676hc4h78h97h669h54chfbh696hb5h54bh5d4h6bh64h644h677h584h5cbh698h9dh5bbh5f8h5b8hcdh644h5c7h694hbfh589q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dljm5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 
},Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-f75b6684c-v6km2_openstack(e5fcf062-7331-4a42-8f87-cb6fba34de17): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/e5fcf062-7331-4a42-8f87-cb6fba34de17/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 01 06:56:34 crc kubenswrapper[4632]: > logger="UnhandledError" Dec 01 06:56:34 crc kubenswrapper[4632]: E1201 06:56:34.767579 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/e5fcf062-7331-4a42-8f87-cb6fba34de17/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-f75b6684c-v6km2" podUID="e5fcf062-7331-4a42-8f87-cb6fba34de17" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.223530 4632 generic.go:334] "Generic (PLEG): container finished" podID="e22d1e7d-958f-4e02-911b-31f513dd9802" containerID="d0d2630e7cd28f1c9ee329e2f1b8a7cfe5f48a11af62654a9e0cf23188007e25" exitCode=0 Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.223618 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-rx6vw" event={"ID":"e22d1e7d-958f-4e02-911b-31f513dd9802","Type":"ContainerDied","Data":"d0d2630e7cd28f1c9ee329e2f1b8a7cfe5f48a11af62654a9e0cf23188007e25"} Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.226909 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-qkjhz" event={"ID":"e2d9748c-3d24-43dd-a125-3a20cfe296e2","Type":"ContainerStarted","Data":"179fd5d4dd296af804299375bc0c726e693341ba2306e54d4674b38e7cb68b87"} Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.229484 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"f0bb9103-afbb-45ea-9427-c4925dd007c9","Type":"ContainerStarted","Data":"31027fd5e09961b724263cc0ba162ece408243fb1c0ae0c805982ab16d7c04df"} Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.229525 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"f0bb9103-afbb-45ea-9427-c4925dd007c9","Type":"ContainerStarted","Data":"490f6c616cacb18ac40455515f1aa4d93099d7df171c70c1b3f58f598d355b81"} Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.231343 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovn-controller-hw5x4" event={"ID":"fe332539-435d-44e0-bcf5-c47332ed1e55","Type":"ContainerStarted","Data":"ba086b222dda22a17658a9ec0b3f994b8a0481fc1470f4f05f6c5943fbb8caa9"} Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.231492 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-hw5x4" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.233526 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"c5f52c6d-0a10-43a9-84f8-940c156f3278","Type":"ContainerStarted","Data":"5e3c7136ab38538a5bc8b63f297fbf70ce97c5d19b8bcd20098b82ff92dfbe82"} Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.235933 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-687bbf6d65-czr5n" event={"ID":"5f8f3547-1296-4a5b-b1a5-427ee1c1d763","Type":"ContainerStarted","Data":"774bbc322259d9082074f0a5bc27125944911b2b27d025644f89bd1ba86175a4"} Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.236224 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-687bbf6d65-czr5n" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.263367 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=9.498266642 podStartE2EDuration="22.263335457s" podCreationTimestamp="2025-12-01 06:56:13 +0000 UTC" firstStartedPulling="2025-12-01 06:56:20.158698553 +0000 UTC m=+789.723711526" lastFinishedPulling="2025-12-01 06:56:32.923767367 +0000 UTC m=+802.488780341" observedRunningTime="2025-12-01 06:56:35.259506953 +0000 UTC m=+804.824519925" watchObservedRunningTime="2025-12-01 06:56:35.263335457 +0000 UTC m=+804.828348429" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.277320 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-687bbf6d65-czr5n" podStartSLOduration=3.366070597 podStartE2EDuration="32.277305757s" podCreationTimestamp="2025-12-01 06:56:03 +0000 UTC" firstStartedPulling="2025-12-01 06:56:03.995500191 +0000 UTC m=+773.560513164" lastFinishedPulling="2025-12-01 06:56:32.906735351 +0000 UTC m=+802.471748324" observedRunningTime="2025-12-01 06:56:35.276829858 +0000 UTC m=+804.841842832" watchObservedRunningTime="2025-12-01 06:56:35.277305757 +0000 UTC m=+804.842318730" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.292086 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-qkjhz" podStartSLOduration=7.469363486 podStartE2EDuration="20.29205109s" podCreationTimestamp="2025-12-01 06:56:15 +0000 UTC" firstStartedPulling="2025-12-01 06:56:20.167562297 +0000 UTC m=+789.732575270" lastFinishedPulling="2025-12-01 06:56:32.990249902 +0000 UTC m=+802.555262874" observedRunningTime="2025-12-01 06:56:35.289912033 +0000 UTC m=+804.854925027" watchObservedRunningTime="2025-12-01 06:56:35.29205109 +0000 UTC m=+804.857064063" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.334270 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.340146 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-hw5x4" podStartSLOduration=4.005260535 podStartE2EDuration="22.340125669s" podCreationTimestamp="2025-12-01 06:56:13 +0000 UTC" firstStartedPulling="2025-12-01 06:56:14.622394138 +0000 UTC m=+784.187407112" 
lastFinishedPulling="2025-12-01 06:56:32.957259273 +0000 UTC m=+802.522272246" observedRunningTime="2025-12-01 06:56:35.306495344 +0000 UTC m=+804.871508317" watchObservedRunningTime="2025-12-01 06:56:35.340125669 +0000 UTC m=+804.905138642" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.349730 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=18.760250265 podStartE2EDuration="20.349710975s" podCreationTimestamp="2025-12-01 06:56:15 +0000 UTC" firstStartedPulling="2025-12-01 06:56:32.961689111 +0000 UTC m=+802.526702084" lastFinishedPulling="2025-12-01 06:56:34.551149822 +0000 UTC m=+804.116162794" observedRunningTime="2025-12-01 06:56:35.333993208 +0000 UTC m=+804.899006191" watchObservedRunningTime="2025-12-01 06:56:35.349710975 +0000 UTC m=+804.914723949" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.468194 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f75b6684c-v6km2"] Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.496457 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78d59965b5-7g8dg"] Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.497809 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78d59965b5-7g8dg" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.500147 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.519469 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78d59965b5-7g8dg"] Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.568033 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d384efc7-ec80-4645-88a9-7adb93e8f4d5-config\") pod \"dnsmasq-dns-78d59965b5-7g8dg\" (UID: \"d384efc7-ec80-4645-88a9-7adb93e8f4d5\") " pod="openstack/dnsmasq-dns-78d59965b5-7g8dg" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.568264 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d384efc7-ec80-4645-88a9-7adb93e8f4d5-ovsdbserver-sb\") pod \"dnsmasq-dns-78d59965b5-7g8dg\" (UID: \"d384efc7-ec80-4645-88a9-7adb93e8f4d5\") " pod="openstack/dnsmasq-dns-78d59965b5-7g8dg" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.568411 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdwv2\" (UniqueName: \"kubernetes.io/projected/d384efc7-ec80-4645-88a9-7adb93e8f4d5-kube-api-access-gdwv2\") pod \"dnsmasq-dns-78d59965b5-7g8dg\" (UID: \"d384efc7-ec80-4645-88a9-7adb93e8f4d5\") " pod="openstack/dnsmasq-dns-78d59965b5-7g8dg" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.568444 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d384efc7-ec80-4645-88a9-7adb93e8f4d5-dns-svc\") pod \"dnsmasq-dns-78d59965b5-7g8dg\" (UID: \"d384efc7-ec80-4645-88a9-7adb93e8f4d5\") " pod="openstack/dnsmasq-dns-78d59965b5-7g8dg" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.607762 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-687bbf6d65-czr5n"] Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.638119 4632 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cf6bcbc4c-kpftc"] Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.639441 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.649635 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.654307 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cf6bcbc4c-kpftc"] Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.670100 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d384efc7-ec80-4645-88a9-7adb93e8f4d5-ovsdbserver-sb\") pod \"dnsmasq-dns-78d59965b5-7g8dg\" (UID: \"d384efc7-ec80-4645-88a9-7adb93e8f4d5\") " pod="openstack/dnsmasq-dns-78d59965b5-7g8dg" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.670160 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlz8b\" (UniqueName: \"kubernetes.io/projected/c4b8c416-341f-4a74-bd75-7901f5dfea22-kube-api-access-zlz8b\") pod \"dnsmasq-dns-cf6bcbc4c-kpftc\" (UID: \"c4b8c416-341f-4a74-bd75-7901f5dfea22\") " pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.670229 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdwv2\" (UniqueName: \"kubernetes.io/projected/d384efc7-ec80-4645-88a9-7adb93e8f4d5-kube-api-access-gdwv2\") pod \"dnsmasq-dns-78d59965b5-7g8dg\" (UID: \"d384efc7-ec80-4645-88a9-7adb93e8f4d5\") " pod="openstack/dnsmasq-dns-78d59965b5-7g8dg" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.670248 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4b8c416-341f-4a74-bd75-7901f5dfea22-config\") pod \"dnsmasq-dns-cf6bcbc4c-kpftc\" (UID: \"c4b8c416-341f-4a74-bd75-7901f5dfea22\") " pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.670489 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d384efc7-ec80-4645-88a9-7adb93e8f4d5-dns-svc\") pod \"dnsmasq-dns-78d59965b5-7g8dg\" (UID: \"d384efc7-ec80-4645-88a9-7adb93e8f4d5\") " pod="openstack/dnsmasq-dns-78d59965b5-7g8dg" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.670750 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4b8c416-341f-4a74-bd75-7901f5dfea22-ovsdbserver-nb\") pod \"dnsmasq-dns-cf6bcbc4c-kpftc\" (UID: \"c4b8c416-341f-4a74-bd75-7901f5dfea22\") " pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.670946 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d384efc7-ec80-4645-88a9-7adb93e8f4d5-ovsdbserver-sb\") pod \"dnsmasq-dns-78d59965b5-7g8dg\" (UID: \"d384efc7-ec80-4645-88a9-7adb93e8f4d5\") " pod="openstack/dnsmasq-dns-78d59965b5-7g8dg" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.671067 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/d384efc7-ec80-4645-88a9-7adb93e8f4d5-config\") pod \"dnsmasq-dns-78d59965b5-7g8dg\" (UID: \"d384efc7-ec80-4645-88a9-7adb93e8f4d5\") " pod="openstack/dnsmasq-dns-78d59965b5-7g8dg" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.671124 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4b8c416-341f-4a74-bd75-7901f5dfea22-dns-svc\") pod \"dnsmasq-dns-cf6bcbc4c-kpftc\" (UID: \"c4b8c416-341f-4a74-bd75-7901f5dfea22\") " pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.671171 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4b8c416-341f-4a74-bd75-7901f5dfea22-ovsdbserver-sb\") pod \"dnsmasq-dns-cf6bcbc4c-kpftc\" (UID: \"c4b8c416-341f-4a74-bd75-7901f5dfea22\") " pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.671326 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d384efc7-ec80-4645-88a9-7adb93e8f4d5-dns-svc\") pod \"dnsmasq-dns-78d59965b5-7g8dg\" (UID: \"d384efc7-ec80-4645-88a9-7adb93e8f4d5\") " pod="openstack/dnsmasq-dns-78d59965b5-7g8dg" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.672005 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d384efc7-ec80-4645-88a9-7adb93e8f4d5-config\") pod \"dnsmasq-dns-78d59965b5-7g8dg\" (UID: \"d384efc7-ec80-4645-88a9-7adb93e8f4d5\") " pod="openstack/dnsmasq-dns-78d59965b5-7g8dg" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.691709 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdwv2\" (UniqueName: \"kubernetes.io/projected/d384efc7-ec80-4645-88a9-7adb93e8f4d5-kube-api-access-gdwv2\") pod \"dnsmasq-dns-78d59965b5-7g8dg\" (UID: \"d384efc7-ec80-4645-88a9-7adb93e8f4d5\") " pod="openstack/dnsmasq-dns-78d59965b5-7g8dg" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.773223 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4b8c416-341f-4a74-bd75-7901f5dfea22-dns-svc\") pod \"dnsmasq-dns-cf6bcbc4c-kpftc\" (UID: \"c4b8c416-341f-4a74-bd75-7901f5dfea22\") " pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.773436 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4b8c416-341f-4a74-bd75-7901f5dfea22-ovsdbserver-sb\") pod \"dnsmasq-dns-cf6bcbc4c-kpftc\" (UID: \"c4b8c416-341f-4a74-bd75-7901f5dfea22\") " pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.773621 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlz8b\" (UniqueName: \"kubernetes.io/projected/c4b8c416-341f-4a74-bd75-7901f5dfea22-kube-api-access-zlz8b\") pod \"dnsmasq-dns-cf6bcbc4c-kpftc\" (UID: \"c4b8c416-341f-4a74-bd75-7901f5dfea22\") " pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.773738 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4b8c416-341f-4a74-bd75-7901f5dfea22-config\") pod 
\"dnsmasq-dns-cf6bcbc4c-kpftc\" (UID: \"c4b8c416-341f-4a74-bd75-7901f5dfea22\") " pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.773842 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4b8c416-341f-4a74-bd75-7901f5dfea22-ovsdbserver-nb\") pod \"dnsmasq-dns-cf6bcbc4c-kpftc\" (UID: \"c4b8c416-341f-4a74-bd75-7901f5dfea22\") " pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.774332 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4b8c416-341f-4a74-bd75-7901f5dfea22-ovsdbserver-sb\") pod \"dnsmasq-dns-cf6bcbc4c-kpftc\" (UID: \"c4b8c416-341f-4a74-bd75-7901f5dfea22\") " pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.774389 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4b8c416-341f-4a74-bd75-7901f5dfea22-dns-svc\") pod \"dnsmasq-dns-cf6bcbc4c-kpftc\" (UID: \"c4b8c416-341f-4a74-bd75-7901f5dfea22\") " pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.774590 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4b8c416-341f-4a74-bd75-7901f5dfea22-ovsdbserver-nb\") pod \"dnsmasq-dns-cf6bcbc4c-kpftc\" (UID: \"c4b8c416-341f-4a74-bd75-7901f5dfea22\") " pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.775138 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4b8c416-341f-4a74-bd75-7901f5dfea22-config\") pod \"dnsmasq-dns-cf6bcbc4c-kpftc\" (UID: \"c4b8c416-341f-4a74-bd75-7901f5dfea22\") " pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.794683 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlz8b\" (UniqueName: \"kubernetes.io/projected/c4b8c416-341f-4a74-bd75-7901f5dfea22-kube-api-access-zlz8b\") pod \"dnsmasq-dns-cf6bcbc4c-kpftc\" (UID: \"c4b8c416-341f-4a74-bd75-7901f5dfea22\") " pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.822188 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78d59965b5-7g8dg" Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.991951 4632 util.go:30] "No sandbox for pod can be found. 
Dec 01 06:56:35 crc kubenswrapper[4632]: I1201 06:56:35.991951 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc"
Dec 01 06:56:36 crc kubenswrapper[4632]: I1201 06:56:36.221076 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78d59965b5-7g8dg"]
Dec 01 06:56:36 crc kubenswrapper[4632]: W1201 06:56:36.231697 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd384efc7_ec80_4645_88a9_7adb93e8f4d5.slice/crio-dc50c6c8024db65b5c0545458c301a3ab26baf175aacd590d131933fafa48330 WatchSource:0}: Error finding container dc50c6c8024db65b5c0545458c301a3ab26baf175aacd590d131933fafa48330: Status 404 returned error can't find the container with id dc50c6c8024db65b5c0545458c301a3ab26baf175aacd590d131933fafa48330
Dec 01 06:56:36 crc kubenswrapper[4632]: I1201 06:56:36.244647 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78d59965b5-7g8dg" event={"ID":"d384efc7-ec80-4645-88a9-7adb93e8f4d5","Type":"ContainerStarted","Data":"dc50c6c8024db65b5c0545458c301a3ab26baf175aacd590d131933fafa48330"}
Dec 01 06:56:36 crc kubenswrapper[4632]: I1201 06:56:36.247203 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-rx6vw" event={"ID":"e22d1e7d-958f-4e02-911b-31f513dd9802","Type":"ContainerStarted","Data":"72cba98fbd23cc19d631745ca341c3c2b8227b8d013e73c496639dc72a784b4c"}
Dec 01 06:56:36 crc kubenswrapper[4632]: I1201 06:56:36.247255 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-rx6vw" event={"ID":"e22d1e7d-958f-4e02-911b-31f513dd9802","Type":"ContainerStarted","Data":"468c0591690bb55d9b83eb2a735078b9b4b9214b33c4442019a67008d1aed5aa"}
Dec 01 06:56:36 crc kubenswrapper[4632]: I1201 06:56:36.247498 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-rx6vw"
Dec 01 06:56:36 crc kubenswrapper[4632]: I1201 06:56:36.250469 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f75b6684c-v6km2" event={"ID":"e5fcf062-7331-4a42-8f87-cb6fba34de17","Type":"ContainerStarted","Data":"a8b5de737dad3bd059387092e047aeb0a0b5627447afc19af2d6be7f175d15ef"}
Dec 01 06:56:36 crc kubenswrapper[4632]: I1201 06:56:36.250502 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-f75b6684c-v6km2" podUID="e5fcf062-7331-4a42-8f87-cb6fba34de17" containerName="dnsmasq-dns" containerID="cri-o://a8b5de737dad3bd059387092e047aeb0a0b5627447afc19af2d6be7f175d15ef" gracePeriod=10
Dec 01 06:56:36 crc kubenswrapper[4632]: I1201 06:56:36.252206 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-f75b6684c-v6km2"
Dec 01 06:56:36 crc kubenswrapper[4632]: I1201 06:56:36.268320 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-rx6vw" podStartSLOduration=5.54174089 podStartE2EDuration="23.268292792s" podCreationTimestamp="2025-12-01 06:56:13 +0000 UTC" firstStartedPulling="2025-12-01 06:56:14.763838384 +0000 UTC m=+784.328851357" lastFinishedPulling="2025-12-01 06:56:32.490390286 +0000 UTC m=+802.055403259" observedRunningTime="2025-12-01 06:56:36.260714973 +0000 UTC m=+805.825727966" watchObservedRunningTime="2025-12-01 06:56:36.268292792 +0000 UTC m=+805.833305764"
Dec 01 06:56:36 crc kubenswrapper[4632]: I1201 06:56:36.279130 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-f75b6684c-v6km2"
podStartSLOduration=5.545456794 podStartE2EDuration="34.279113277s" podCreationTimestamp="2025-12-01 06:56:02 +0000 UTC" firstStartedPulling="2025-12-01 06:56:03.7150071 +0000 UTC m=+773.280020073" lastFinishedPulling="2025-12-01 06:56:32.448663583 +0000 UTC m=+802.013676556" observedRunningTime="2025-12-01 06:56:36.2756802 +0000 UTC m=+805.840693173" watchObservedRunningTime="2025-12-01 06:56:36.279113277 +0000 UTC m=+805.844126251" Dec 01 06:56:36 crc kubenswrapper[4632]: I1201 06:56:36.325739 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:36 crc kubenswrapper[4632]: I1201 06:56:36.364982 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:36 crc kubenswrapper[4632]: I1201 06:56:36.382849 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cf6bcbc4c-kpftc"] Dec 01 06:56:36 crc kubenswrapper[4632]: W1201 06:56:36.388775 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc4b8c416_341f_4a74_bd75_7901f5dfea22.slice/crio-380d867934aefc1572b2aa3a97c63bb853a5529b948ab9d314dc02fd4ce86452 WatchSource:0}: Error finding container 380d867934aefc1572b2aa3a97c63bb853a5529b948ab9d314dc02fd4ce86452: Status 404 returned error can't find the container with id 380d867934aefc1572b2aa3a97c63bb853a5529b948ab9d314dc02fd4ce86452 Dec 01 06:56:36 crc kubenswrapper[4632]: I1201 06:56:36.672609 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:36 crc kubenswrapper[4632]: I1201 06:56:36.736428 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f75b6684c-v6km2" Dec 01 06:56:36 crc kubenswrapper[4632]: I1201 06:56:36.789670 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e5fcf062-7331-4a42-8f87-cb6fba34de17-dns-svc\") pod \"e5fcf062-7331-4a42-8f87-cb6fba34de17\" (UID: \"e5fcf062-7331-4a42-8f87-cb6fba34de17\") " Dec 01 06:56:36 crc kubenswrapper[4632]: I1201 06:56:36.789721 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5fcf062-7331-4a42-8f87-cb6fba34de17-config\") pod \"e5fcf062-7331-4a42-8f87-cb6fba34de17\" (UID: \"e5fcf062-7331-4a42-8f87-cb6fba34de17\") " Dec 01 06:56:36 crc kubenswrapper[4632]: I1201 06:56:36.789948 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dljm5\" (UniqueName: \"kubernetes.io/projected/e5fcf062-7331-4a42-8f87-cb6fba34de17-kube-api-access-dljm5\") pod \"e5fcf062-7331-4a42-8f87-cb6fba34de17\" (UID: \"e5fcf062-7331-4a42-8f87-cb6fba34de17\") " Dec 01 06:56:36 crc kubenswrapper[4632]: I1201 06:56:36.804018 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5fcf062-7331-4a42-8f87-cb6fba34de17-kube-api-access-dljm5" (OuterVolumeSpecName: "kube-api-access-dljm5") pod "e5fcf062-7331-4a42-8f87-cb6fba34de17" (UID: "e5fcf062-7331-4a42-8f87-cb6fba34de17"). InnerVolumeSpecName "kube-api-access-dljm5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:56:36 crc kubenswrapper[4632]: I1201 06:56:36.840892 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5fcf062-7331-4a42-8f87-cb6fba34de17-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e5fcf062-7331-4a42-8f87-cb6fba34de17" (UID: "e5fcf062-7331-4a42-8f87-cb6fba34de17"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:56:36 crc kubenswrapper[4632]: I1201 06:56:36.848731 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5fcf062-7331-4a42-8f87-cb6fba34de17-config" (OuterVolumeSpecName: "config") pod "e5fcf062-7331-4a42-8f87-cb6fba34de17" (UID: "e5fcf062-7331-4a42-8f87-cb6fba34de17"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:56:36 crc kubenswrapper[4632]: I1201 06:56:36.892522 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dljm5\" (UniqueName: \"kubernetes.io/projected/e5fcf062-7331-4a42-8f87-cb6fba34de17-kube-api-access-dljm5\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:36 crc kubenswrapper[4632]: I1201 06:56:36.892553 4632 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e5fcf062-7331-4a42-8f87-cb6fba34de17-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:36 crc kubenswrapper[4632]: I1201 06:56:36.892563 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5fcf062-7331-4a42-8f87-cb6fba34de17-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.259798 4632 generic.go:334] "Generic (PLEG): container finished" podID="ecd36c5b-96fc-49af-b8f2-634fcf854cfa" containerID="b3c5866325c129c491c9d80bbd9823e769c8f42607d5d8cb2580b3567669dfa9" exitCode=0 Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.259883 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"ecd36c5b-96fc-49af-b8f2-634fcf854cfa","Type":"ContainerDied","Data":"b3c5866325c129c491c9d80bbd9823e769c8f42607d5d8cb2580b3567669dfa9"} Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.262911 4632 generic.go:334] "Generic (PLEG): container finished" podID="d384efc7-ec80-4645-88a9-7adb93e8f4d5" containerID="a32d7dc304d0750d33c5e3c028c93487f83ca17cd0e7037fe8dcea340d16f0e5" exitCode=0 Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.263015 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78d59965b5-7g8dg" event={"ID":"d384efc7-ec80-4645-88a9-7adb93e8f4d5","Type":"ContainerDied","Data":"a32d7dc304d0750d33c5e3c028c93487f83ca17cd0e7037fe8dcea340d16f0e5"} Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.266298 4632 generic.go:334] "Generic (PLEG): container finished" podID="c4b8c416-341f-4a74-bd75-7901f5dfea22" containerID="a6d11d9047346eff3762238e870537db97e4caf8ece4dfdc0633237da114cf59" exitCode=0 Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.266478 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc" event={"ID":"c4b8c416-341f-4a74-bd75-7901f5dfea22","Type":"ContainerDied","Data":"a6d11d9047346eff3762238e870537db97e4caf8ece4dfdc0633237da114cf59"} Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.266542 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc" 
event={"ID":"c4b8c416-341f-4a74-bd75-7901f5dfea22","Type":"ContainerStarted","Data":"380d867934aefc1572b2aa3a97c63bb853a5529b948ab9d314dc02fd4ce86452"} Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.275043 4632 generic.go:334] "Generic (PLEG): container finished" podID="d73ee8ba-1384-40b9-bbe8-62425cd044db" containerID="bac35727270b6cf90b88cfbc1af1f43a67057c10a7a23903a8beef07e2aa4ad1" exitCode=0 Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.275113 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"d73ee8ba-1384-40b9-bbe8-62425cd044db","Type":"ContainerDied","Data":"bac35727270b6cf90b88cfbc1af1f43a67057c10a7a23903a8beef07e2aa4ad1"} Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.277612 4632 generic.go:334] "Generic (PLEG): container finished" podID="e5fcf062-7331-4a42-8f87-cb6fba34de17" containerID="a8b5de737dad3bd059387092e047aeb0a0b5627447afc19af2d6be7f175d15ef" exitCode=0 Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.277783 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f75b6684c-v6km2" event={"ID":"e5fcf062-7331-4a42-8f87-cb6fba34de17","Type":"ContainerDied","Data":"a8b5de737dad3bd059387092e047aeb0a0b5627447afc19af2d6be7f175d15ef"} Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.277866 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f75b6684c-v6km2" event={"ID":"e5fcf062-7331-4a42-8f87-cb6fba34de17","Type":"ContainerDied","Data":"35a56697bf06ba4b8b0a48c6cfe6c5ed1bfc5d80b8cefe76562fc7e9a5109271"} Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.277891 4632 scope.go:117] "RemoveContainer" containerID="a8b5de737dad3bd059387092e047aeb0a0b5627447afc19af2d6be7f175d15ef" Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.278263 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-f75b6684c-v6km2"
Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.279780 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-rx6vw"
Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.280478 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-687bbf6d65-czr5n" podUID="5f8f3547-1296-4a5b-b1a5-427ee1c1d763" containerName="dnsmasq-dns" containerID="cri-o://774bbc322259d9082074f0a5bc27125944911b2b27d025644f89bd1ba86175a4" gracePeriod=10
Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.299889 4632 scope.go:117] "RemoveContainer" containerID="ffb41324d63f8996f0218b34166cbd0e21a63842529b71709079989cffcb5b66"
Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.336420 4632 scope.go:117] "RemoveContainer" containerID="a8b5de737dad3bd059387092e047aeb0a0b5627447afc19af2d6be7f175d15ef"
Dec 01 06:56:37 crc kubenswrapper[4632]: E1201 06:56:37.338451 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8b5de737dad3bd059387092e047aeb0a0b5627447afc19af2d6be7f175d15ef\": container with ID starting with a8b5de737dad3bd059387092e047aeb0a0b5627447afc19af2d6be7f175d15ef not found: ID does not exist" containerID="a8b5de737dad3bd059387092e047aeb0a0b5627447afc19af2d6be7f175d15ef"
Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.338498 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8b5de737dad3bd059387092e047aeb0a0b5627447afc19af2d6be7f175d15ef"} err="failed to get container status \"a8b5de737dad3bd059387092e047aeb0a0b5627447afc19af2d6be7f175d15ef\": rpc error: code = NotFound desc = could not find container \"a8b5de737dad3bd059387092e047aeb0a0b5627447afc19af2d6be7f175d15ef\": container with ID starting with a8b5de737dad3bd059387092e047aeb0a0b5627447afc19af2d6be7f175d15ef not found: ID does not exist"
Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.338526 4632 scope.go:117] "RemoveContainer" containerID="ffb41324d63f8996f0218b34166cbd0e21a63842529b71709079989cffcb5b66"
Dec 01 06:56:37 crc kubenswrapper[4632]: E1201 06:56:37.339006 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ffb41324d63f8996f0218b34166cbd0e21a63842529b71709079989cffcb5b66\": container with ID starting with ffb41324d63f8996f0218b34166cbd0e21a63842529b71709079989cffcb5b66 not found: ID does not exist" containerID="ffb41324d63f8996f0218b34166cbd0e21a63842529b71709079989cffcb5b66"
Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.339060 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffb41324d63f8996f0218b34166cbd0e21a63842529b71709079989cffcb5b66"} err="failed to get container status \"ffb41324d63f8996f0218b34166cbd0e21a63842529b71709079989cffcb5b66\": rpc error: code = NotFound desc = could not find container \"ffb41324d63f8996f0218b34166cbd0e21a63842529b71709079989cffcb5b66\": container with ID starting with ffb41324d63f8996f0218b34166cbd0e21a63842529b71709079989cffcb5b66 not found: ID does not exist"
Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.379164 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f75b6684c-v6km2"]
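
The RemoveContainer / NotFound sequence above is a benign race: by the time the kubelet re-queries ContainerStatus, CRI-O has already deleted the container, so the runtime answers with gRPC code NotFound and the kubelet logs it and moves on. A sketch of the same error discrimination, assuming google.golang.org/grpc; isBenignNotFound is a hypothetical helper, not kubelet code:

package main

import (
	"errors"
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// isBenignNotFound reports whether err is a gRPC status with code NotFound,
// the condition the log entries above record and then ignore.
func isBenignNotFound(err error) bool {
	if err == nil {
		return false
	}
	if s, ok := status.FromError(err); ok {
		return s.Code() == codes.NotFound
	}
	return false
}

func main() {
	// Message shape taken from the log above.
	err := status.Error(codes.NotFound,
		`could not find container "a8b5de737dad3bd059387092e047aeb0a0b5627447afc19af2d6be7f175d15ef"`)
	fmt.Println(isBenignNotFound(err))                  // true: safe to skip
	fmt.Println(isBenignNotFound(errors.New("other")))  // false: real failure
}
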
pods=["openstack/dnsmasq-dns-f75b6684c-v6km2"] Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.673744 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.684545 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-687bbf6d65-czr5n" Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.713436 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.819389 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f8f3547-1296-4a5b-b1a5-427ee1c1d763-config\") pod \"5f8f3547-1296-4a5b-b1a5-427ee1c1d763\" (UID: \"5f8f3547-1296-4a5b-b1a5-427ee1c1d763\") " Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.819462 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8g6hj\" (UniqueName: \"kubernetes.io/projected/5f8f3547-1296-4a5b-b1a5-427ee1c1d763-kube-api-access-8g6hj\") pod \"5f8f3547-1296-4a5b-b1a5-427ee1c1d763\" (UID: \"5f8f3547-1296-4a5b-b1a5-427ee1c1d763\") " Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.819567 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5f8f3547-1296-4a5b-b1a5-427ee1c1d763-dns-svc\") pod \"5f8f3547-1296-4a5b-b1a5-427ee1c1d763\" (UID: \"5f8f3547-1296-4a5b-b1a5-427ee1c1d763\") " Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.832558 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f8f3547-1296-4a5b-b1a5-427ee1c1d763-kube-api-access-8g6hj" (OuterVolumeSpecName: "kube-api-access-8g6hj") pod "5f8f3547-1296-4a5b-b1a5-427ee1c1d763" (UID: "5f8f3547-1296-4a5b-b1a5-427ee1c1d763"). InnerVolumeSpecName "kube-api-access-8g6hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.858863 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f8f3547-1296-4a5b-b1a5-427ee1c1d763-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5f8f3547-1296-4a5b-b1a5-427ee1c1d763" (UID: "5f8f3547-1296-4a5b-b1a5-427ee1c1d763"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.859659 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f8f3547-1296-4a5b-b1a5-427ee1c1d763-config" (OuterVolumeSpecName: "config") pod "5f8f3547-1296-4a5b-b1a5-427ee1c1d763" (UID: "5f8f3547-1296-4a5b-b1a5-427ee1c1d763"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.922866 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f8f3547-1296-4a5b-b1a5-427ee1c1d763-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.922915 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8g6hj\" (UniqueName: \"kubernetes.io/projected/5f8f3547-1296-4a5b-b1a5-427ee1c1d763-kube-api-access-8g6hj\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:37 crc kubenswrapper[4632]: I1201 06:56:37.922927 4632 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5f8f3547-1296-4a5b-b1a5-427ee1c1d763-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:38 crc kubenswrapper[4632]: I1201 06:56:38.288280 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"ecd36c5b-96fc-49af-b8f2-634fcf854cfa","Type":"ContainerStarted","Data":"5494466eadc01592b3a275514492729e9731f6e641c49f54349330fc2031e607"} Dec 01 06:56:38 crc kubenswrapper[4632]: I1201 06:56:38.289881 4632 generic.go:334] "Generic (PLEG): container finished" podID="5f8f3547-1296-4a5b-b1a5-427ee1c1d763" containerID="774bbc322259d9082074f0a5bc27125944911b2b27d025644f89bd1ba86175a4" exitCode=0 Dec 01 06:56:38 crc kubenswrapper[4632]: I1201 06:56:38.289927 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-687bbf6d65-czr5n" event={"ID":"5f8f3547-1296-4a5b-b1a5-427ee1c1d763","Type":"ContainerDied","Data":"774bbc322259d9082074f0a5bc27125944911b2b27d025644f89bd1ba86175a4"} Dec 01 06:56:38 crc kubenswrapper[4632]: I1201 06:56:38.289987 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-687bbf6d65-czr5n" event={"ID":"5f8f3547-1296-4a5b-b1a5-427ee1c1d763","Type":"ContainerDied","Data":"d236698dc0ec71aa1df96763c76b02e36d6f16685645769d14dce321d8ff527a"} Dec 01 06:56:38 crc kubenswrapper[4632]: I1201 06:56:38.290020 4632 scope.go:117] "RemoveContainer" containerID="774bbc322259d9082074f0a5bc27125944911b2b27d025644f89bd1ba86175a4" Dec 01 06:56:38 crc kubenswrapper[4632]: I1201 06:56:38.290106 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-687bbf6d65-czr5n" Dec 01 06:56:38 crc kubenswrapper[4632]: I1201 06:56:38.291410 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78d59965b5-7g8dg" event={"ID":"d384efc7-ec80-4645-88a9-7adb93e8f4d5","Type":"ContainerStarted","Data":"10e6ac66497c76fb64968a99f15b0e82e56e584c46b217abbe1d47316cce6b1c"} Dec 01 06:56:38 crc kubenswrapper[4632]: I1201 06:56:38.291586 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-78d59965b5-7g8dg" Dec 01 06:56:38 crc kubenswrapper[4632]: I1201 06:56:38.293704 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc" event={"ID":"c4b8c416-341f-4a74-bd75-7901f5dfea22","Type":"ContainerStarted","Data":"fc0607eb5aaea146e530d8b372f4ef314663e6068a3ffecf32148fb1bedebf66"} Dec 01 06:56:38 crc kubenswrapper[4632]: I1201 06:56:38.294185 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc" Dec 01 06:56:38 crc kubenswrapper[4632]: I1201 06:56:38.295655 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"d73ee8ba-1384-40b9-bbe8-62425cd044db","Type":"ContainerStarted","Data":"8c5393360a7f9f4870b197b2966885865a87eb61d085ec369e6a2b0a4e81bbf8"} Dec 01 06:56:38 crc kubenswrapper[4632]: I1201 06:56:38.307527 4632 scope.go:117] "RemoveContainer" containerID="3306559e4a2f4f1d7d41bf82f4d7b2a1482ca5d8656cb73d3310080bad3edafc" Dec 01 06:56:38 crc kubenswrapper[4632]: I1201 06:56:38.324929 4632 scope.go:117] "RemoveContainer" containerID="774bbc322259d9082074f0a5bc27125944911b2b27d025644f89bd1ba86175a4" Dec 01 06:56:38 crc kubenswrapper[4632]: E1201 06:56:38.325480 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"774bbc322259d9082074f0a5bc27125944911b2b27d025644f89bd1ba86175a4\": container with ID starting with 774bbc322259d9082074f0a5bc27125944911b2b27d025644f89bd1ba86175a4 not found: ID does not exist" containerID="774bbc322259d9082074f0a5bc27125944911b2b27d025644f89bd1ba86175a4" Dec 01 06:56:38 crc kubenswrapper[4632]: I1201 06:56:38.325514 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"774bbc322259d9082074f0a5bc27125944911b2b27d025644f89bd1ba86175a4"} err="failed to get container status \"774bbc322259d9082074f0a5bc27125944911b2b27d025644f89bd1ba86175a4\": rpc error: code = NotFound desc = could not find container \"774bbc322259d9082074f0a5bc27125944911b2b27d025644f89bd1ba86175a4\": container with ID starting with 774bbc322259d9082074f0a5bc27125944911b2b27d025644f89bd1ba86175a4 not found: ID does not exist" Dec 01 06:56:38 crc kubenswrapper[4632]: I1201 06:56:38.325537 4632 scope.go:117] "RemoveContainer" containerID="3306559e4a2f4f1d7d41bf82f4d7b2a1482ca5d8656cb73d3310080bad3edafc" Dec 01 06:56:38 crc kubenswrapper[4632]: E1201 06:56:38.325940 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3306559e4a2f4f1d7d41bf82f4d7b2a1482ca5d8656cb73d3310080bad3edafc\": container with ID starting with 3306559e4a2f4f1d7d41bf82f4d7b2a1482ca5d8656cb73d3310080bad3edafc not found: ID does not exist" containerID="3306559e4a2f4f1d7d41bf82f4d7b2a1482ca5d8656cb73d3310080bad3edafc" Dec 01 06:56:38 crc kubenswrapper[4632]: I1201 06:56:38.325981 4632 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"3306559e4a2f4f1d7d41bf82f4d7b2a1482ca5d8656cb73d3310080bad3edafc"} err="failed to get container status \"3306559e4a2f4f1d7d41bf82f4d7b2a1482ca5d8656cb73d3310080bad3edafc\": rpc error: code = NotFound desc = could not find container \"3306559e4a2f4f1d7d41bf82f4d7b2a1482ca5d8656cb73d3310080bad3edafc\": container with ID starting with 3306559e4a2f4f1d7d41bf82f4d7b2a1482ca5d8656cb73d3310080bad3edafc not found: ID does not exist" Dec 01 06:56:38 crc kubenswrapper[4632]: I1201 06:56:38.328917 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 01 06:56:38 crc kubenswrapper[4632]: I1201 06:56:38.329291 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=7.531896612 podStartE2EDuration="32.32927661s" podCreationTimestamp="2025-12-01 06:56:06 +0000 UTC" firstStartedPulling="2025-12-01 06:56:08.110889807 +0000 UTC m=+777.675902781" lastFinishedPulling="2025-12-01 06:56:32.908269806 +0000 UTC m=+802.473282779" observedRunningTime="2025-12-01 06:56:38.325299126 +0000 UTC m=+807.890312099" watchObservedRunningTime="2025-12-01 06:56:38.32927661 +0000 UTC m=+807.894289583" Dec 01 06:56:38 crc kubenswrapper[4632]: I1201 06:56:38.331539 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=8.1782978 podStartE2EDuration="34.331526876s" podCreationTimestamp="2025-12-01 06:56:04 +0000 UTC" firstStartedPulling="2025-12-01 06:56:06.765297683 +0000 UTC m=+776.330310657" lastFinishedPulling="2025-12-01 06:56:32.91852676 +0000 UTC m=+802.483539733" observedRunningTime="2025-12-01 06:56:38.306398071 +0000 UTC m=+807.871411045" watchObservedRunningTime="2025-12-01 06:56:38.331526876 +0000 UTC m=+807.896539849" Dec 01 06:56:38 crc kubenswrapper[4632]: I1201 06:56:38.343717 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-78d59965b5-7g8dg" podStartSLOduration=3.343704754 podStartE2EDuration="3.343704754s" podCreationTimestamp="2025-12-01 06:56:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:56:38.339592976 +0000 UTC m=+807.904605949" watchObservedRunningTime="2025-12-01 06:56:38.343704754 +0000 UTC m=+807.908717727" Dec 01 06:56:38 crc kubenswrapper[4632]: I1201 06:56:38.359090 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc" podStartSLOduration=3.359074345 podStartE2EDuration="3.359074345s" podCreationTimestamp="2025-12-01 06:56:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:56:38.355019804 +0000 UTC m=+807.920032777" watchObservedRunningTime="2025-12-01 06:56:38.359074345 +0000 UTC m=+807.924087318" Dec 01 06:56:38 crc kubenswrapper[4632]: I1201 06:56:38.371874 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-687bbf6d65-czr5n"] Dec 01 06:56:38 crc kubenswrapper[4632]: I1201 06:56:38.378627 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-687bbf6d65-czr5n"] Dec 01 06:56:38 crc kubenswrapper[4632]: I1201 06:56:38.759707 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f8f3547-1296-4a5b-b1a5-427ee1c1d763" 
path="/var/lib/kubelet/pods/5f8f3547-1296-4a5b-b1a5-427ee1c1d763/volumes" Dec 01 06:56:38 crc kubenswrapper[4632]: I1201 06:56:38.760422 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5fcf062-7331-4a42-8f87-cb6fba34de17" path="/var/lib/kubelet/pods/e5fcf062-7331-4a42-8f87-cb6fba34de17/volumes" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.332594 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.436562 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 01 06:56:39 crc kubenswrapper[4632]: E1201 06:56:39.436928 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5fcf062-7331-4a42-8f87-cb6fba34de17" containerName="dnsmasq-dns" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.436948 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5fcf062-7331-4a42-8f87-cb6fba34de17" containerName="dnsmasq-dns" Dec 01 06:56:39 crc kubenswrapper[4632]: E1201 06:56:39.436965 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f8f3547-1296-4a5b-b1a5-427ee1c1d763" containerName="dnsmasq-dns" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.436971 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f8f3547-1296-4a5b-b1a5-427ee1c1d763" containerName="dnsmasq-dns" Dec 01 06:56:39 crc kubenswrapper[4632]: E1201 06:56:39.436978 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5fcf062-7331-4a42-8f87-cb6fba34de17" containerName="init" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.436985 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5fcf062-7331-4a42-8f87-cb6fba34de17" containerName="init" Dec 01 06:56:39 crc kubenswrapper[4632]: E1201 06:56:39.436994 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f8f3547-1296-4a5b-b1a5-427ee1c1d763" containerName="init" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.436999 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f8f3547-1296-4a5b-b1a5-427ee1c1d763" containerName="init" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.437167 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5fcf062-7331-4a42-8f87-cb6fba34de17" containerName="dnsmasq-dns" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.437181 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f8f3547-1296-4a5b-b1a5-427ee1c1d763" containerName="dnsmasq-dns" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.438011 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.440050 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.440284 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-cg5dq" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.440463 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.444225 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.461109 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.552069 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0182dc49-3707-4d2e-a867-5eb37db588f8-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"0182dc49-3707-4d2e-a867-5eb37db588f8\") " pod="openstack/ovn-northd-0" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.552122 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zh92\" (UniqueName: \"kubernetes.io/projected/0182dc49-3707-4d2e-a867-5eb37db588f8-kube-api-access-4zh92\") pod \"ovn-northd-0\" (UID: \"0182dc49-3707-4d2e-a867-5eb37db588f8\") " pod="openstack/ovn-northd-0" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.552209 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0182dc49-3707-4d2e-a867-5eb37db588f8-config\") pod \"ovn-northd-0\" (UID: \"0182dc49-3707-4d2e-a867-5eb37db588f8\") " pod="openstack/ovn-northd-0" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.552235 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/0182dc49-3707-4d2e-a867-5eb37db588f8-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"0182dc49-3707-4d2e-a867-5eb37db588f8\") " pod="openstack/ovn-northd-0" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.552252 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0182dc49-3707-4d2e-a867-5eb37db588f8-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"0182dc49-3707-4d2e-a867-5eb37db588f8\") " pod="openstack/ovn-northd-0" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.552272 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0182dc49-3707-4d2e-a867-5eb37db588f8-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"0182dc49-3707-4d2e-a867-5eb37db588f8\") " pod="openstack/ovn-northd-0" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.552444 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0182dc49-3707-4d2e-a867-5eb37db588f8-scripts\") pod \"ovn-northd-0\" (UID: \"0182dc49-3707-4d2e-a867-5eb37db588f8\") " pod="openstack/ovn-northd-0" Dec 01 06:56:39 crc kubenswrapper[4632]: 
I1201 06:56:39.654343 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0182dc49-3707-4d2e-a867-5eb37db588f8-config\") pod \"ovn-northd-0\" (UID: \"0182dc49-3707-4d2e-a867-5eb37db588f8\") " pod="openstack/ovn-northd-0" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.654414 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/0182dc49-3707-4d2e-a867-5eb37db588f8-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"0182dc49-3707-4d2e-a867-5eb37db588f8\") " pod="openstack/ovn-northd-0" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.654448 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0182dc49-3707-4d2e-a867-5eb37db588f8-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"0182dc49-3707-4d2e-a867-5eb37db588f8\") " pod="openstack/ovn-northd-0" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.654479 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0182dc49-3707-4d2e-a867-5eb37db588f8-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"0182dc49-3707-4d2e-a867-5eb37db588f8\") " pod="openstack/ovn-northd-0" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.654580 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0182dc49-3707-4d2e-a867-5eb37db588f8-scripts\") pod \"ovn-northd-0\" (UID: \"0182dc49-3707-4d2e-a867-5eb37db588f8\") " pod="openstack/ovn-northd-0" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.654624 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0182dc49-3707-4d2e-a867-5eb37db588f8-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"0182dc49-3707-4d2e-a867-5eb37db588f8\") " pod="openstack/ovn-northd-0" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.654658 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zh92\" (UniqueName: \"kubernetes.io/projected/0182dc49-3707-4d2e-a867-5eb37db588f8-kube-api-access-4zh92\") pod \"ovn-northd-0\" (UID: \"0182dc49-3707-4d2e-a867-5eb37db588f8\") " pod="openstack/ovn-northd-0" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.655203 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0182dc49-3707-4d2e-a867-5eb37db588f8-config\") pod \"ovn-northd-0\" (UID: \"0182dc49-3707-4d2e-a867-5eb37db588f8\") " pod="openstack/ovn-northd-0" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.655579 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0182dc49-3707-4d2e-a867-5eb37db588f8-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"0182dc49-3707-4d2e-a867-5eb37db588f8\") " pod="openstack/ovn-northd-0" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.655886 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0182dc49-3707-4d2e-a867-5eb37db588f8-scripts\") pod \"ovn-northd-0\" (UID: \"0182dc49-3707-4d2e-a867-5eb37db588f8\") " pod="openstack/ovn-northd-0" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.661748 4632 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/0182dc49-3707-4d2e-a867-5eb37db588f8-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"0182dc49-3707-4d2e-a867-5eb37db588f8\") " pod="openstack/ovn-northd-0" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.661752 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0182dc49-3707-4d2e-a867-5eb37db588f8-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"0182dc49-3707-4d2e-a867-5eb37db588f8\") " pod="openstack/ovn-northd-0" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.662227 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0182dc49-3707-4d2e-a867-5eb37db588f8-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"0182dc49-3707-4d2e-a867-5eb37db588f8\") " pod="openstack/ovn-northd-0" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.671687 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zh92\" (UniqueName: \"kubernetes.io/projected/0182dc49-3707-4d2e-a867-5eb37db588f8-kube-api-access-4zh92\") pod \"ovn-northd-0\" (UID: \"0182dc49-3707-4d2e-a867-5eb37db588f8\") " pod="openstack/ovn-northd-0" Dec 01 06:56:39 crc kubenswrapper[4632]: I1201 06:56:39.756192 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 01 06:56:40 crc kubenswrapper[4632]: I1201 06:56:40.209476 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 01 06:56:40 crc kubenswrapper[4632]: I1201 06:56:40.310238 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"0182dc49-3707-4d2e-a867-5eb37db588f8","Type":"ContainerStarted","Data":"e72d97c8749ebf01e8265f02dfb8f1a8f3e8fe2dcc96ecbdbe0079204cc28958"} Dec 01 06:56:42 crc kubenswrapper[4632]: I1201 06:56:42.326170 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"0182dc49-3707-4d2e-a867-5eb37db588f8","Type":"ContainerStarted","Data":"6b3cddb58f2d761d194b55b4dcc03f0f6b6d61a383bc9f3da900cde09f785938"} Dec 01 06:56:42 crc kubenswrapper[4632]: I1201 06:56:42.326764 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"0182dc49-3707-4d2e-a867-5eb37db588f8","Type":"ContainerStarted","Data":"8fe34f717d71481e3b3503f0e4523b564faf40c75166e534c326214c56187821"} Dec 01 06:56:42 crc kubenswrapper[4632]: I1201 06:56:42.326783 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 01 06:56:42 crc kubenswrapper[4632]: I1201 06:56:42.344545 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=1.9882004690000001 podStartE2EDuration="3.344526337s" podCreationTimestamp="2025-12-01 06:56:39 +0000 UTC" firstStartedPulling="2025-12-01 06:56:40.218699327 +0000 UTC m=+809.783712301" lastFinishedPulling="2025-12-01 06:56:41.575025206 +0000 UTC m=+811.140038169" observedRunningTime="2025-12-01 06:56:42.342622233 +0000 UTC m=+811.907635207" watchObservedRunningTime="2025-12-01 06:56:42.344526337 +0000 UTC m=+811.909539300" Dec 01 06:56:42 crc kubenswrapper[4632]: I1201 06:56:42.918266 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 01 06:56:45 crc 
kubenswrapper[4632]: I1201 06:56:45.824583 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-78d59965b5-7g8dg"
Dec 01 06:56:45 crc kubenswrapper[4632]: I1201 06:56:45.993761 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc"
Dec 01 06:56:46 crc kubenswrapper[4632]: I1201 06:56:46.031423 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78d59965b5-7g8dg"]
Dec 01 06:56:46 crc kubenswrapper[4632]: I1201 06:56:46.299780 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0"
Dec 01 06:56:46 crc kubenswrapper[4632]: I1201 06:56:46.299825 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0"
Dec 01 06:56:46 crc kubenswrapper[4632]: I1201 06:56:46.357803 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0"
Dec 01 06:56:46 crc kubenswrapper[4632]: I1201 06:56:46.383670 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-78d59965b5-7g8dg" podUID="d384efc7-ec80-4645-88a9-7adb93e8f4d5" containerName="dnsmasq-dns" containerID="cri-o://10e6ac66497c76fb64968a99f15b0e82e56e584c46b217abbe1d47316cce6b1c" gracePeriod=10
Dec 01 06:56:46 crc kubenswrapper[4632]: I1201 06:56:46.435182 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0"
Dec 01 06:56:46 crc kubenswrapper[4632]: I1201 06:56:46.753617 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78d59965b5-7g8dg"
Dec 01 06:56:46 crc kubenswrapper[4632]: I1201 06:56:46.908026 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d384efc7-ec80-4645-88a9-7adb93e8f4d5-ovsdbserver-sb\") pod \"d384efc7-ec80-4645-88a9-7adb93e8f4d5\" (UID: \"d384efc7-ec80-4645-88a9-7adb93e8f4d5\") "
Dec 01 06:56:46 crc kubenswrapper[4632]: I1201 06:56:46.908081 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d384efc7-ec80-4645-88a9-7adb93e8f4d5-dns-svc\") pod \"d384efc7-ec80-4645-88a9-7adb93e8f4d5\" (UID: \"d384efc7-ec80-4645-88a9-7adb93e8f4d5\") "
Dec 01 06:56:46 crc kubenswrapper[4632]: I1201 06:56:46.908104 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d384efc7-ec80-4645-88a9-7adb93e8f4d5-config\") pod \"d384efc7-ec80-4645-88a9-7adb93e8f4d5\" (UID: \"d384efc7-ec80-4645-88a9-7adb93e8f4d5\") "
Dec 01 06:56:46 crc kubenswrapper[4632]: I1201 06:56:46.908152 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gdwv2\" (UniqueName: \"kubernetes.io/projected/d384efc7-ec80-4645-88a9-7adb93e8f4d5-kube-api-access-gdwv2\") pod \"d384efc7-ec80-4645-88a9-7adb93e8f4d5\" (UID: \"d384efc7-ec80-4645-88a9-7adb93e8f4d5\") "
Dec 01 06:56:46 crc kubenswrapper[4632]: I1201 06:56:46.913743 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d384efc7-ec80-4645-88a9-7adb93e8f4d5-kube-api-access-gdwv2" (OuterVolumeSpecName: "kube-api-access-gdwv2") pod "d384efc7-ec80-4645-88a9-7adb93e8f4d5" (UID: "d384efc7-ec80-4645-88a9-7adb93e8f4d5"). InnerVolumeSpecName "kube-api-access-gdwv2". PluginName "kubernetes.io/projected", VolumeGidValue ""
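
"Killing container with a grace period" with gracePeriod=10, as logged above, means the runtime delivers SIGTERM and escalates to SIGKILL if the process is still alive ten seconds later; the kubelet does this through the CRI StopContainer call. A standalone, Unix-only sketch of the same semantics using os/exec; stopWithGrace is a hypothetical helper, not kubelet code:

package main

import (
	"fmt"
	"os/exec"
	"syscall"
	"time"
)

// stopWithGrace sends SIGTERM, waits up to grace for a clean exit, and
// force-kills the process if the deadline passes.
func stopWithGrace(cmd *exec.Cmd, grace time.Duration) {
	_ = cmd.Process.Signal(syscall.SIGTERM)
	done := make(chan error, 1)
	go func() { done <- cmd.Wait() }()
	select {
	case <-done:
		fmt.Println("exited within grace period")
	case <-time.After(grace):
		_ = cmd.Process.Kill()
		fmt.Println("grace period expired; killed")
	}
}

func main() {
	cmd := exec.Command("sleep", "30")
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	stopWithGrace(cmd, 10*time.Second) // gracePeriod=10 as in the log
}
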
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:56:46 crc kubenswrapper[4632]: I1201 06:56:46.949486 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d384efc7-ec80-4645-88a9-7adb93e8f4d5-config" (OuterVolumeSpecName: "config") pod "d384efc7-ec80-4645-88a9-7adb93e8f4d5" (UID: "d384efc7-ec80-4645-88a9-7adb93e8f4d5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:56:46 crc kubenswrapper[4632]: I1201 06:56:46.950472 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d384efc7-ec80-4645-88a9-7adb93e8f4d5-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "d384efc7-ec80-4645-88a9-7adb93e8f4d5" (UID: "d384efc7-ec80-4645-88a9-7adb93e8f4d5"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:56:46 crc kubenswrapper[4632]: I1201 06:56:46.951028 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d384efc7-ec80-4645-88a9-7adb93e8f4d5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d384efc7-ec80-4645-88a9-7adb93e8f4d5" (UID: "d384efc7-ec80-4645-88a9-7adb93e8f4d5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.010235 4632 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d384efc7-ec80-4645-88a9-7adb93e8f4d5-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.010264 4632 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d384efc7-ec80-4645-88a9-7adb93e8f4d5-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.010275 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d384efc7-ec80-4645-88a9-7adb93e8f4d5-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.010285 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gdwv2\" (UniqueName: \"kubernetes.io/projected/d384efc7-ec80-4645-88a9-7adb93e8f4d5-kube-api-access-gdwv2\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.390085 4632 generic.go:334] "Generic (PLEG): container finished" podID="d384efc7-ec80-4645-88a9-7adb93e8f4d5" containerID="10e6ac66497c76fb64968a99f15b0e82e56e584c46b217abbe1d47316cce6b1c" exitCode=0 Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.390994 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78d59965b5-7g8dg" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.391413 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78d59965b5-7g8dg" event={"ID":"d384efc7-ec80-4645-88a9-7adb93e8f4d5","Type":"ContainerDied","Data":"10e6ac66497c76fb64968a99f15b0e82e56e584c46b217abbe1d47316cce6b1c"} Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.391465 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78d59965b5-7g8dg" event={"ID":"d384efc7-ec80-4645-88a9-7adb93e8f4d5","Type":"ContainerDied","Data":"dc50c6c8024db65b5c0545458c301a3ab26baf175aacd590d131933fafa48330"} Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.391486 4632 scope.go:117] "RemoveContainer" containerID="10e6ac66497c76fb64968a99f15b0e82e56e584c46b217abbe1d47316cce6b1c" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.406285 4632 scope.go:117] "RemoveContainer" containerID="a32d7dc304d0750d33c5e3c028c93487f83ca17cd0e7037fe8dcea340d16f0e5" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.421426 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78d59965b5-7g8dg"] Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.426597 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78d59965b5-7g8dg"] Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.443027 4632 scope.go:117] "RemoveContainer" containerID="10e6ac66497c76fb64968a99f15b0e82e56e584c46b217abbe1d47316cce6b1c" Dec 01 06:56:47 crc kubenswrapper[4632]: E1201 06:56:47.443541 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10e6ac66497c76fb64968a99f15b0e82e56e584c46b217abbe1d47316cce6b1c\": container with ID starting with 10e6ac66497c76fb64968a99f15b0e82e56e584c46b217abbe1d47316cce6b1c not found: ID does not exist" containerID="10e6ac66497c76fb64968a99f15b0e82e56e584c46b217abbe1d47316cce6b1c" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.443662 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10e6ac66497c76fb64968a99f15b0e82e56e584c46b217abbe1d47316cce6b1c"} err="failed to get container status \"10e6ac66497c76fb64968a99f15b0e82e56e584c46b217abbe1d47316cce6b1c\": rpc error: code = NotFound desc = could not find container \"10e6ac66497c76fb64968a99f15b0e82e56e584c46b217abbe1d47316cce6b1c\": container with ID starting with 10e6ac66497c76fb64968a99f15b0e82e56e584c46b217abbe1d47316cce6b1c not found: ID does not exist" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.443738 4632 scope.go:117] "RemoveContainer" containerID="a32d7dc304d0750d33c5e3c028c93487f83ca17cd0e7037fe8dcea340d16f0e5" Dec 01 06:56:47 crc kubenswrapper[4632]: E1201 06:56:47.444113 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a32d7dc304d0750d33c5e3c028c93487f83ca17cd0e7037fe8dcea340d16f0e5\": container with ID starting with a32d7dc304d0750d33c5e3c028c93487f83ca17cd0e7037fe8dcea340d16f0e5 not found: ID does not exist" containerID="a32d7dc304d0750d33c5e3c028c93487f83ca17cd0e7037fe8dcea340d16f0e5" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.444153 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a32d7dc304d0750d33c5e3c028c93487f83ca17cd0e7037fe8dcea340d16f0e5"} err="failed to get container status 
\"a32d7dc304d0750d33c5e3c028c93487f83ca17cd0e7037fe8dcea340d16f0e5\": rpc error: code = NotFound desc = could not find container \"a32d7dc304d0750d33c5e3c028c93487f83ca17cd0e7037fe8dcea340d16f0e5\": container with ID starting with a32d7dc304d0750d33c5e3c028c93487f83ca17cd0e7037fe8dcea340d16f0e5 not found: ID does not exist" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.553276 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-b211-account-create-update-sj67v"] Dec 01 06:56:47 crc kubenswrapper[4632]: E1201 06:56:47.553795 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d384efc7-ec80-4645-88a9-7adb93e8f4d5" containerName="dnsmasq-dns" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.553899 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="d384efc7-ec80-4645-88a9-7adb93e8f4d5" containerName="dnsmasq-dns" Dec 01 06:56:47 crc kubenswrapper[4632]: E1201 06:56:47.553981 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d384efc7-ec80-4645-88a9-7adb93e8f4d5" containerName="init" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.554038 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="d384efc7-ec80-4645-88a9-7adb93e8f4d5" containerName="init" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.554301 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="d384efc7-ec80-4645-88a9-7adb93e8f4d5" containerName="dnsmasq-dns" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.554892 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-b211-account-create-update-sj67v" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.556784 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.564618 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-lwxrf"] Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.565731 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-lwxrf" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.571622 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-b211-account-create-update-sj67v"] Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.589399 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.590743 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-lwxrf"] Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.599186 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.652812 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.719434 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27e3e955-071a-41fe-a6b1-bb6e6bbd0426-operator-scripts\") pod \"keystone-b211-account-create-update-sj67v\" (UID: \"27e3e955-071a-41fe-a6b1-bb6e6bbd0426\") " pod="openstack/keystone-b211-account-create-update-sj67v" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.719564 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b51f3803-021a-45cc-a967-4ad902df39f2-operator-scripts\") pod \"keystone-db-create-lwxrf\" (UID: \"b51f3803-021a-45cc-a967-4ad902df39f2\") " pod="openstack/keystone-db-create-lwxrf" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.719627 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swsqf\" (UniqueName: \"kubernetes.io/projected/27e3e955-071a-41fe-a6b1-bb6e6bbd0426-kube-api-access-swsqf\") pod \"keystone-b211-account-create-update-sj67v\" (UID: \"27e3e955-071a-41fe-a6b1-bb6e6bbd0426\") " pod="openstack/keystone-b211-account-create-update-sj67v" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.719706 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bt5kj\" (UniqueName: \"kubernetes.io/projected/b51f3803-021a-45cc-a967-4ad902df39f2-kube-api-access-bt5kj\") pod \"keystone-db-create-lwxrf\" (UID: \"b51f3803-021a-45cc-a967-4ad902df39f2\") " pod="openstack/keystone-db-create-lwxrf" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.783632 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-r4zxk"] Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.784857 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-r4zxk" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.794731 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-r4zxk"] Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.821217 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swsqf\" (UniqueName: \"kubernetes.io/projected/27e3e955-071a-41fe-a6b1-bb6e6bbd0426-kube-api-access-swsqf\") pod \"keystone-b211-account-create-update-sj67v\" (UID: \"27e3e955-071a-41fe-a6b1-bb6e6bbd0426\") " pod="openstack/keystone-b211-account-create-update-sj67v" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.821398 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bt5kj\" (UniqueName: \"kubernetes.io/projected/b51f3803-021a-45cc-a967-4ad902df39f2-kube-api-access-bt5kj\") pod \"keystone-db-create-lwxrf\" (UID: \"b51f3803-021a-45cc-a967-4ad902df39f2\") " pod="openstack/keystone-db-create-lwxrf" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.821495 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27e3e955-071a-41fe-a6b1-bb6e6bbd0426-operator-scripts\") pod \"keystone-b211-account-create-update-sj67v\" (UID: \"27e3e955-071a-41fe-a6b1-bb6e6bbd0426\") " pod="openstack/keystone-b211-account-create-update-sj67v" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.821619 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b51f3803-021a-45cc-a967-4ad902df39f2-operator-scripts\") pod \"keystone-db-create-lwxrf\" (UID: \"b51f3803-021a-45cc-a967-4ad902df39f2\") " pod="openstack/keystone-db-create-lwxrf" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.822179 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27e3e955-071a-41fe-a6b1-bb6e6bbd0426-operator-scripts\") pod \"keystone-b211-account-create-update-sj67v\" (UID: \"27e3e955-071a-41fe-a6b1-bb6e6bbd0426\") " pod="openstack/keystone-b211-account-create-update-sj67v" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.822597 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b51f3803-021a-45cc-a967-4ad902df39f2-operator-scripts\") pod \"keystone-db-create-lwxrf\" (UID: \"b51f3803-021a-45cc-a967-4ad902df39f2\") " pod="openstack/keystone-db-create-lwxrf" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.837453 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bt5kj\" (UniqueName: \"kubernetes.io/projected/b51f3803-021a-45cc-a967-4ad902df39f2-kube-api-access-bt5kj\") pod \"keystone-db-create-lwxrf\" (UID: \"b51f3803-021a-45cc-a967-4ad902df39f2\") " pod="openstack/keystone-db-create-lwxrf" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.837644 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swsqf\" (UniqueName: \"kubernetes.io/projected/27e3e955-071a-41fe-a6b1-bb6e6bbd0426-kube-api-access-swsqf\") pod \"keystone-b211-account-create-update-sj67v\" (UID: \"27e3e955-071a-41fe-a6b1-bb6e6bbd0426\") " pod="openstack/keystone-b211-account-create-update-sj67v" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.868824 4632 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack/placement-81a0-account-create-update-vb5bg"] Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.869874 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-81a0-account-create-update-vb5bg" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.872111 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.873300 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-b211-account-create-update-sj67v" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.876687 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-81a0-account-create-update-vb5bg"] Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.884814 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-lwxrf" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.923432 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvkff\" (UniqueName: \"kubernetes.io/projected/072f1a7f-3b2a-421c-a000-1bb398725d35-kube-api-access-fvkff\") pod \"placement-db-create-r4zxk\" (UID: \"072f1a7f-3b2a-421c-a000-1bb398725d35\") " pod="openstack/placement-db-create-r4zxk" Dec 01 06:56:47 crc kubenswrapper[4632]: I1201 06:56:47.923673 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/072f1a7f-3b2a-421c-a000-1bb398725d35-operator-scripts\") pod \"placement-db-create-r4zxk\" (UID: \"072f1a7f-3b2a-421c-a000-1bb398725d35\") " pod="openstack/placement-db-create-r4zxk" Dec 01 06:56:48 crc kubenswrapper[4632]: I1201 06:56:48.025500 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvkff\" (UniqueName: \"kubernetes.io/projected/072f1a7f-3b2a-421c-a000-1bb398725d35-kube-api-access-fvkff\") pod \"placement-db-create-r4zxk\" (UID: \"072f1a7f-3b2a-421c-a000-1bb398725d35\") " pod="openstack/placement-db-create-r4zxk" Dec 01 06:56:48 crc kubenswrapper[4632]: I1201 06:56:48.026515 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjdzx\" (UniqueName: \"kubernetes.io/projected/14a44dd2-610d-4a19-8301-08aa47df8192-kube-api-access-zjdzx\") pod \"placement-81a0-account-create-update-vb5bg\" (UID: \"14a44dd2-610d-4a19-8301-08aa47df8192\") " pod="openstack/placement-81a0-account-create-update-vb5bg" Dec 01 06:56:48 crc kubenswrapper[4632]: I1201 06:56:48.026706 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/14a44dd2-610d-4a19-8301-08aa47df8192-operator-scripts\") pod \"placement-81a0-account-create-update-vb5bg\" (UID: \"14a44dd2-610d-4a19-8301-08aa47df8192\") " pod="openstack/placement-81a0-account-create-update-vb5bg" Dec 01 06:56:48 crc kubenswrapper[4632]: I1201 06:56:48.026831 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/072f1a7f-3b2a-421c-a000-1bb398725d35-operator-scripts\") pod \"placement-db-create-r4zxk\" (UID: \"072f1a7f-3b2a-421c-a000-1bb398725d35\") " pod="openstack/placement-db-create-r4zxk" Dec 01 06:56:48 crc kubenswrapper[4632]: I1201 06:56:48.027911 
4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/072f1a7f-3b2a-421c-a000-1bb398725d35-operator-scripts\") pod \"placement-db-create-r4zxk\" (UID: \"072f1a7f-3b2a-421c-a000-1bb398725d35\") " pod="openstack/placement-db-create-r4zxk" Dec 01 06:56:48 crc kubenswrapper[4632]: I1201 06:56:48.042064 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvkff\" (UniqueName: \"kubernetes.io/projected/072f1a7f-3b2a-421c-a000-1bb398725d35-kube-api-access-fvkff\") pod \"placement-db-create-r4zxk\" (UID: \"072f1a7f-3b2a-421c-a000-1bb398725d35\") " pod="openstack/placement-db-create-r4zxk" Dec 01 06:56:48 crc kubenswrapper[4632]: I1201 06:56:48.100530 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-r4zxk" Dec 01 06:56:48 crc kubenswrapper[4632]: I1201 06:56:48.128275 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjdzx\" (UniqueName: \"kubernetes.io/projected/14a44dd2-610d-4a19-8301-08aa47df8192-kube-api-access-zjdzx\") pod \"placement-81a0-account-create-update-vb5bg\" (UID: \"14a44dd2-610d-4a19-8301-08aa47df8192\") " pod="openstack/placement-81a0-account-create-update-vb5bg" Dec 01 06:56:48 crc kubenswrapper[4632]: I1201 06:56:48.128370 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/14a44dd2-610d-4a19-8301-08aa47df8192-operator-scripts\") pod \"placement-81a0-account-create-update-vb5bg\" (UID: \"14a44dd2-610d-4a19-8301-08aa47df8192\") " pod="openstack/placement-81a0-account-create-update-vb5bg" Dec 01 06:56:48 crc kubenswrapper[4632]: I1201 06:56:48.129020 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/14a44dd2-610d-4a19-8301-08aa47df8192-operator-scripts\") pod \"placement-81a0-account-create-update-vb5bg\" (UID: \"14a44dd2-610d-4a19-8301-08aa47df8192\") " pod="openstack/placement-81a0-account-create-update-vb5bg" Dec 01 06:56:48 crc kubenswrapper[4632]: I1201 06:56:48.145323 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjdzx\" (UniqueName: \"kubernetes.io/projected/14a44dd2-610d-4a19-8301-08aa47df8192-kube-api-access-zjdzx\") pod \"placement-81a0-account-create-update-vb5bg\" (UID: \"14a44dd2-610d-4a19-8301-08aa47df8192\") " pod="openstack/placement-81a0-account-create-update-vb5bg" Dec 01 06:56:48 crc kubenswrapper[4632]: I1201 06:56:48.194417 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-81a0-account-create-update-vb5bg" Dec 01 06:56:48 crc kubenswrapper[4632]: I1201 06:56:48.293547 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-b211-account-create-update-sj67v"] Dec 01 06:56:48 crc kubenswrapper[4632]: W1201 06:56:48.302180 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod27e3e955_071a_41fe_a6b1_bb6e6bbd0426.slice/crio-1843e72418e57816645e85a4ce83c65823410aba1e8c8925cd6c2e897ae0ea5a WatchSource:0}: Error finding container 1843e72418e57816645e85a4ce83c65823410aba1e8c8925cd6c2e897ae0ea5a: Status 404 returned error can't find the container with id 1843e72418e57816645e85a4ce83c65823410aba1e8c8925cd6c2e897ae0ea5a Dec 01 06:56:48 crc kubenswrapper[4632]: I1201 06:56:48.338135 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-lwxrf"] Dec 01 06:56:48 crc kubenswrapper[4632]: W1201 06:56:48.346295 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb51f3803_021a_45cc_a967_4ad902df39f2.slice/crio-f0f84df5ad7e8cc0fdff0fea5e11f19538c529dd7bf08d0b1ee7c061a6ebd282 WatchSource:0}: Error finding container f0f84df5ad7e8cc0fdff0fea5e11f19538c529dd7bf08d0b1ee7c061a6ebd282: Status 404 returned error can't find the container with id f0f84df5ad7e8cc0fdff0fea5e11f19538c529dd7bf08d0b1ee7c061a6ebd282 Dec 01 06:56:48 crc kubenswrapper[4632]: I1201 06:56:48.405789 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-b211-account-create-update-sj67v" event={"ID":"27e3e955-071a-41fe-a6b1-bb6e6bbd0426","Type":"ContainerStarted","Data":"1843e72418e57816645e85a4ce83c65823410aba1e8c8925cd6c2e897ae0ea5a"} Dec 01 06:56:48 crc kubenswrapper[4632]: I1201 06:56:48.409660 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-lwxrf" event={"ID":"b51f3803-021a-45cc-a967-4ad902df39f2","Type":"ContainerStarted","Data":"f0f84df5ad7e8cc0fdff0fea5e11f19538c529dd7bf08d0b1ee7c061a6ebd282"} Dec 01 06:56:48 crc kubenswrapper[4632]: I1201 06:56:48.490466 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-r4zxk"] Dec 01 06:56:48 crc kubenswrapper[4632]: I1201 06:56:48.491146 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 01 06:56:48 crc kubenswrapper[4632]: W1201 06:56:48.493888 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod072f1a7f_3b2a_421c_a000_1bb398725d35.slice/crio-95feb39914521d636f7b0f36e73fcc42d8b0d039c62db12b096006ac2f0327c6 WatchSource:0}: Error finding container 95feb39914521d636f7b0f36e73fcc42d8b0d039c62db12b096006ac2f0327c6: Status 404 returned error can't find the container with id 95feb39914521d636f7b0f36e73fcc42d8b0d039c62db12b096006ac2f0327c6 Dec 01 06:56:48 crc kubenswrapper[4632]: I1201 06:56:48.590097 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-81a0-account-create-update-vb5bg"] Dec 01 06:56:48 crc kubenswrapper[4632]: W1201 06:56:48.604208 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14a44dd2_610d_4a19_8301_08aa47df8192.slice/crio-046ff1f57901544aa3b88a0500a1af699d23db89f79627d5f22d816962d400ad WatchSource:0}: Error finding container 
046ff1f57901544aa3b88a0500a1af699d23db89f79627d5f22d816962d400ad: Status 404 returned error can't find the container with id 046ff1f57901544aa3b88a0500a1af699d23db89f79627d5f22d816962d400ad Dec 01 06:56:48 crc kubenswrapper[4632]: I1201 06:56:48.759044 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d384efc7-ec80-4645-88a9-7adb93e8f4d5" path="/var/lib/kubelet/pods/d384efc7-ec80-4645-88a9-7adb93e8f4d5/volumes" Dec 01 06:56:49 crc kubenswrapper[4632]: E1201 06:56:49.088952 4632 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod072f1a7f_3b2a_421c_a000_1bb398725d35.slice/crio-e9db658058f1ee45e7e1d18a5b67ffc8a4859bfc83bc0dc3ddcc34c33b6c6dd8.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14a44dd2_610d_4a19_8301_08aa47df8192.slice/crio-conmon-fd79fbda2e9a90e8d0536b1bbf30d130c251d03e6f1b9c149ed800f286861e71.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod072f1a7f_3b2a_421c_a000_1bb398725d35.slice/crio-conmon-e9db658058f1ee45e7e1d18a5b67ffc8a4859bfc83bc0dc3ddcc34c33b6c6dd8.scope\": RecentStats: unable to find data in memory cache]" Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.426709 4632 generic.go:334] "Generic (PLEG): container finished" podID="14a44dd2-610d-4a19-8301-08aa47df8192" containerID="fd79fbda2e9a90e8d0536b1bbf30d130c251d03e6f1b9c149ed800f286861e71" exitCode=0 Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.426824 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-81a0-account-create-update-vb5bg" event={"ID":"14a44dd2-610d-4a19-8301-08aa47df8192","Type":"ContainerDied","Data":"fd79fbda2e9a90e8d0536b1bbf30d130c251d03e6f1b9c149ed800f286861e71"} Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.427039 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-81a0-account-create-update-vb5bg" event={"ID":"14a44dd2-610d-4a19-8301-08aa47df8192","Type":"ContainerStarted","Data":"046ff1f57901544aa3b88a0500a1af699d23db89f79627d5f22d816962d400ad"} Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.429381 4632 generic.go:334] "Generic (PLEG): container finished" podID="27e3e955-071a-41fe-a6b1-bb6e6bbd0426" containerID="9a35cd51125bb19cc432f2c69d657409db7348508467c35cdecdee984a7a9a96" exitCode=0 Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.429424 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-b211-account-create-update-sj67v" event={"ID":"27e3e955-071a-41fe-a6b1-bb6e6bbd0426","Type":"ContainerDied","Data":"9a35cd51125bb19cc432f2c69d657409db7348508467c35cdecdee984a7a9a96"} Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.433937 4632 generic.go:334] "Generic (PLEG): container finished" podID="b51f3803-021a-45cc-a967-4ad902df39f2" containerID="aaf4ac54b73f8f7517868203a2a2fc0249c42c543e12e95aefca6a3c7e9702ed" exitCode=0 Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.434027 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-lwxrf" event={"ID":"b51f3803-021a-45cc-a967-4ad902df39f2","Type":"ContainerDied","Data":"aaf4ac54b73f8f7517868203a2a2fc0249c42c543e12e95aefca6a3c7e9702ed"} Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.436391 4632 generic.go:334] "Generic (PLEG): container finished" 
podID="072f1a7f-3b2a-421c-a000-1bb398725d35" containerID="e9db658058f1ee45e7e1d18a5b67ffc8a4859bfc83bc0dc3ddcc34c33b6c6dd8" exitCode=0 Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.436491 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-r4zxk" event={"ID":"072f1a7f-3b2a-421c-a000-1bb398725d35","Type":"ContainerDied","Data":"e9db658058f1ee45e7e1d18a5b67ffc8a4859bfc83bc0dc3ddcc34c33b6c6dd8"} Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.436531 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-r4zxk" event={"ID":"072f1a7f-3b2a-421c-a000-1bb398725d35","Type":"ContainerStarted","Data":"95feb39914521d636f7b0f36e73fcc42d8b0d039c62db12b096006ac2f0327c6"} Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.537757 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7c9c9f7685-tp77b"] Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.539114 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b" Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.559449 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c9c9f7685-tp77b"] Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.663408 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64843eea-4a56-4384-bafe-1f16a3ca66c9-config\") pod \"dnsmasq-dns-7c9c9f7685-tp77b\" (UID: \"64843eea-4a56-4384-bafe-1f16a3ca66c9\") " pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b" Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.663452 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64843eea-4a56-4384-bafe-1f16a3ca66c9-dns-svc\") pod \"dnsmasq-dns-7c9c9f7685-tp77b\" (UID: \"64843eea-4a56-4384-bafe-1f16a3ca66c9\") " pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b" Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.663479 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/64843eea-4a56-4384-bafe-1f16a3ca66c9-ovsdbserver-sb\") pod \"dnsmasq-dns-7c9c9f7685-tp77b\" (UID: \"64843eea-4a56-4384-bafe-1f16a3ca66c9\") " pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b" Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.663535 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9g2f\" (UniqueName: \"kubernetes.io/projected/64843eea-4a56-4384-bafe-1f16a3ca66c9-kube-api-access-n9g2f\") pod \"dnsmasq-dns-7c9c9f7685-tp77b\" (UID: \"64843eea-4a56-4384-bafe-1f16a3ca66c9\") " pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b" Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.663559 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64843eea-4a56-4384-bafe-1f16a3ca66c9-ovsdbserver-nb\") pod \"dnsmasq-dns-7c9c9f7685-tp77b\" (UID: \"64843eea-4a56-4384-bafe-1f16a3ca66c9\") " pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b" Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.765403 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9g2f\" (UniqueName: 
\"kubernetes.io/projected/64843eea-4a56-4384-bafe-1f16a3ca66c9-kube-api-access-n9g2f\") pod \"dnsmasq-dns-7c9c9f7685-tp77b\" (UID: \"64843eea-4a56-4384-bafe-1f16a3ca66c9\") " pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b" Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.765469 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64843eea-4a56-4384-bafe-1f16a3ca66c9-ovsdbserver-nb\") pod \"dnsmasq-dns-7c9c9f7685-tp77b\" (UID: \"64843eea-4a56-4384-bafe-1f16a3ca66c9\") " pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b" Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.765633 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64843eea-4a56-4384-bafe-1f16a3ca66c9-config\") pod \"dnsmasq-dns-7c9c9f7685-tp77b\" (UID: \"64843eea-4a56-4384-bafe-1f16a3ca66c9\") " pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b" Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.765660 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64843eea-4a56-4384-bafe-1f16a3ca66c9-dns-svc\") pod \"dnsmasq-dns-7c9c9f7685-tp77b\" (UID: \"64843eea-4a56-4384-bafe-1f16a3ca66c9\") " pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b" Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.765685 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/64843eea-4a56-4384-bafe-1f16a3ca66c9-ovsdbserver-sb\") pod \"dnsmasq-dns-7c9c9f7685-tp77b\" (UID: \"64843eea-4a56-4384-bafe-1f16a3ca66c9\") " pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b" Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.766496 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64843eea-4a56-4384-bafe-1f16a3ca66c9-ovsdbserver-nb\") pod \"dnsmasq-dns-7c9c9f7685-tp77b\" (UID: \"64843eea-4a56-4384-bafe-1f16a3ca66c9\") " pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b" Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.766531 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64843eea-4a56-4384-bafe-1f16a3ca66c9-config\") pod \"dnsmasq-dns-7c9c9f7685-tp77b\" (UID: \"64843eea-4a56-4384-bafe-1f16a3ca66c9\") " pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b" Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.766575 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/64843eea-4a56-4384-bafe-1f16a3ca66c9-ovsdbserver-sb\") pod \"dnsmasq-dns-7c9c9f7685-tp77b\" (UID: \"64843eea-4a56-4384-bafe-1f16a3ca66c9\") " pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b" Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.766753 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64843eea-4a56-4384-bafe-1f16a3ca66c9-dns-svc\") pod \"dnsmasq-dns-7c9c9f7685-tp77b\" (UID: \"64843eea-4a56-4384-bafe-1f16a3ca66c9\") " pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b" Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.782927 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9g2f\" (UniqueName: \"kubernetes.io/projected/64843eea-4a56-4384-bafe-1f16a3ca66c9-kube-api-access-n9g2f\") pod \"dnsmasq-dns-7c9c9f7685-tp77b\" (UID: 
\"64843eea-4a56-4384-bafe-1f16a3ca66c9\") " pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b" Dec 01 06:56:49 crc kubenswrapper[4632]: I1201 06:56:49.875939 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b" Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.297163 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c9c9f7685-tp77b"] Dec 01 06:56:50 crc kubenswrapper[4632]: W1201 06:56:50.300137 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64843eea_4a56_4384_bafe_1f16a3ca66c9.slice/crio-6e2bfdceaafed3353647a7dcb252ccd4e1965d86b66b3439b33363f24a856c16 WatchSource:0}: Error finding container 6e2bfdceaafed3353647a7dcb252ccd4e1965d86b66b3439b33363f24a856c16: Status 404 returned error can't find the container with id 6e2bfdceaafed3353647a7dcb252ccd4e1965d86b66b3439b33363f24a856c16 Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.445081 4632 generic.go:334] "Generic (PLEG): container finished" podID="64843eea-4a56-4384-bafe-1f16a3ca66c9" containerID="b0b768f9692e21bbb6539f6446da06187420c98b3edba652845d55afb7d3fa83" exitCode=0 Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.445453 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b" event={"ID":"64843eea-4a56-4384-bafe-1f16a3ca66c9","Type":"ContainerDied","Data":"b0b768f9692e21bbb6539f6446da06187420c98b3edba652845d55afb7d3fa83"} Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.445492 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b" event={"ID":"64843eea-4a56-4384-bafe-1f16a3ca66c9","Type":"ContainerStarted","Data":"6e2bfdceaafed3353647a7dcb252ccd4e1965d86b66b3439b33363f24a856c16"} Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.738042 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.744086 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.751218 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.751462 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.751627 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.753216 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-nj69r" Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.761601 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-81a0-account-create-update-vb5bg" Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.761839 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.892437 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zjdzx\" (UniqueName: \"kubernetes.io/projected/14a44dd2-610d-4a19-8301-08aa47df8192-kube-api-access-zjdzx\") pod \"14a44dd2-610d-4a19-8301-08aa47df8192\" (UID: \"14a44dd2-610d-4a19-8301-08aa47df8192\") " Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.892498 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/14a44dd2-610d-4a19-8301-08aa47df8192-operator-scripts\") pod \"14a44dd2-610d-4a19-8301-08aa47df8192\" (UID: \"14a44dd2-610d-4a19-8301-08aa47df8192\") " Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.892956 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"b136b809-94ef-4a5e-86b6-d3652e7ce987\") " pod="openstack/swift-storage-0" Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.893055 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b136b809-94ef-4a5e-86b6-d3652e7ce987-etc-swift\") pod \"swift-storage-0\" (UID: \"b136b809-94ef-4a5e-86b6-d3652e7ce987\") " pod="openstack/swift-storage-0" Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.893166 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b136b809-94ef-4a5e-86b6-d3652e7ce987-cache\") pod \"swift-storage-0\" (UID: \"b136b809-94ef-4a5e-86b6-d3652e7ce987\") " pod="openstack/swift-storage-0" Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.893249 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b136b809-94ef-4a5e-86b6-d3652e7ce987-lock\") pod \"swift-storage-0\" (UID: \"b136b809-94ef-4a5e-86b6-d3652e7ce987\") " pod="openstack/swift-storage-0" Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.893391 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzlpq\" (UniqueName: \"kubernetes.io/projected/b136b809-94ef-4a5e-86b6-d3652e7ce987-kube-api-access-pzlpq\") pod \"swift-storage-0\" (UID: \"b136b809-94ef-4a5e-86b6-d3652e7ce987\") " pod="openstack/swift-storage-0" Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.894964 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14a44dd2-610d-4a19-8301-08aa47df8192-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "14a44dd2-610d-4a19-8301-08aa47df8192" (UID: "14a44dd2-610d-4a19-8301-08aa47df8192"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.899861 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14a44dd2-610d-4a19-8301-08aa47df8192-kube-api-access-zjdzx" (OuterVolumeSpecName: "kube-api-access-zjdzx") pod "14a44dd2-610d-4a19-8301-08aa47df8192" (UID: "14a44dd2-610d-4a19-8301-08aa47df8192"). InnerVolumeSpecName "kube-api-access-zjdzx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.968982 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-lwxrf" Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.974421 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-b211-account-create-update-sj67v" Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.980308 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-r4zxk" Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.999117 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b136b809-94ef-4a5e-86b6-d3652e7ce987-cache\") pod \"swift-storage-0\" (UID: \"b136b809-94ef-4a5e-86b6-d3652e7ce987\") " pod="openstack/swift-storage-0" Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.999198 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b136b809-94ef-4a5e-86b6-d3652e7ce987-lock\") pod \"swift-storage-0\" (UID: \"b136b809-94ef-4a5e-86b6-d3652e7ce987\") " pod="openstack/swift-storage-0" Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.999285 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzlpq\" (UniqueName: \"kubernetes.io/projected/b136b809-94ef-4a5e-86b6-d3652e7ce987-kube-api-access-pzlpq\") pod \"swift-storage-0\" (UID: \"b136b809-94ef-4a5e-86b6-d3652e7ce987\") " pod="openstack/swift-storage-0" Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.999377 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"b136b809-94ef-4a5e-86b6-d3652e7ce987\") " pod="openstack/swift-storage-0" Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.999445 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b136b809-94ef-4a5e-86b6-d3652e7ce987-etc-swift\") pod \"swift-storage-0\" (UID: \"b136b809-94ef-4a5e-86b6-d3652e7ce987\") " pod="openstack/swift-storage-0" Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.999529 4632 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/14a44dd2-610d-4a19-8301-08aa47df8192-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:50 crc kubenswrapper[4632]: I1201 06:56:50.999548 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zjdzx\" (UniqueName: \"kubernetes.io/projected/14a44dd2-610d-4a19-8301-08aa47df8192-kube-api-access-zjdzx\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:50 crc kubenswrapper[4632]: E1201 06:56:50.999675 4632 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap 
"swift-ring-files" not found Dec 01 06:56:50 crc kubenswrapper[4632]: E1201 06:56:50.999693 4632 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 01 06:56:50 crc kubenswrapper[4632]: E1201 06:56:50.999748 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b136b809-94ef-4a5e-86b6-d3652e7ce987-etc-swift podName:b136b809-94ef-4a5e-86b6-d3652e7ce987 nodeName:}" failed. No retries permitted until 2025-12-01 06:56:51.499727174 +0000 UTC m=+821.064740147 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b136b809-94ef-4a5e-86b6-d3652e7ce987-etc-swift") pod "swift-storage-0" (UID: "b136b809-94ef-4a5e-86b6-d3652e7ce987") : configmap "swift-ring-files" not found Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.000336 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/b136b809-94ef-4a5e-86b6-d3652e7ce987-cache\") pod \"swift-storage-0\" (UID: \"b136b809-94ef-4a5e-86b6-d3652e7ce987\") " pod="openstack/swift-storage-0" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.000632 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/b136b809-94ef-4a5e-86b6-d3652e7ce987-lock\") pod \"swift-storage-0\" (UID: \"b136b809-94ef-4a5e-86b6-d3652e7ce987\") " pod="openstack/swift-storage-0" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.000713 4632 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"b136b809-94ef-4a5e-86b6-d3652e7ce987\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/swift-storage-0" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.022645 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"b136b809-94ef-4a5e-86b6-d3652e7ce987\") " pod="openstack/swift-storage-0" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.034411 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzlpq\" (UniqueName: \"kubernetes.io/projected/b136b809-94ef-4a5e-86b6-d3652e7ce987-kube-api-access-pzlpq\") pod \"swift-storage-0\" (UID: \"b136b809-94ef-4a5e-86b6-d3652e7ce987\") " pod="openstack/swift-storage-0" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.100251 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/072f1a7f-3b2a-421c-a000-1bb398725d35-operator-scripts\") pod \"072f1a7f-3b2a-421c-a000-1bb398725d35\" (UID: \"072f1a7f-3b2a-421c-a000-1bb398725d35\") " Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.100330 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bt5kj\" (UniqueName: \"kubernetes.io/projected/b51f3803-021a-45cc-a967-4ad902df39f2-kube-api-access-bt5kj\") pod \"b51f3803-021a-45cc-a967-4ad902df39f2\" (UID: \"b51f3803-021a-45cc-a967-4ad902df39f2\") " Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.100385 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swsqf\" (UniqueName: 
\"kubernetes.io/projected/27e3e955-071a-41fe-a6b1-bb6e6bbd0426-kube-api-access-swsqf\") pod \"27e3e955-071a-41fe-a6b1-bb6e6bbd0426\" (UID: \"27e3e955-071a-41fe-a6b1-bb6e6bbd0426\") " Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.100448 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27e3e955-071a-41fe-a6b1-bb6e6bbd0426-operator-scripts\") pod \"27e3e955-071a-41fe-a6b1-bb6e6bbd0426\" (UID: \"27e3e955-071a-41fe-a6b1-bb6e6bbd0426\") " Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.100469 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fvkff\" (UniqueName: \"kubernetes.io/projected/072f1a7f-3b2a-421c-a000-1bb398725d35-kube-api-access-fvkff\") pod \"072f1a7f-3b2a-421c-a000-1bb398725d35\" (UID: \"072f1a7f-3b2a-421c-a000-1bb398725d35\") " Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.100935 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/072f1a7f-3b2a-421c-a000-1bb398725d35-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "072f1a7f-3b2a-421c-a000-1bb398725d35" (UID: "072f1a7f-3b2a-421c-a000-1bb398725d35"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.101043 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b51f3803-021a-45cc-a967-4ad902df39f2-operator-scripts\") pod \"b51f3803-021a-45cc-a967-4ad902df39f2\" (UID: \"b51f3803-021a-45cc-a967-4ad902df39f2\") " Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.101034 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27e3e955-071a-41fe-a6b1-bb6e6bbd0426-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "27e3e955-071a-41fe-a6b1-bb6e6bbd0426" (UID: "27e3e955-071a-41fe-a6b1-bb6e6bbd0426"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.101423 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b51f3803-021a-45cc-a967-4ad902df39f2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b51f3803-021a-45cc-a967-4ad902df39f2" (UID: "b51f3803-021a-45cc-a967-4ad902df39f2"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.101941 4632 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b51f3803-021a-45cc-a967-4ad902df39f2-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.101958 4632 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/072f1a7f-3b2a-421c-a000-1bb398725d35-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.101969 4632 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27e3e955-071a-41fe-a6b1-bb6e6bbd0426-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.103729 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b51f3803-021a-45cc-a967-4ad902df39f2-kube-api-access-bt5kj" (OuterVolumeSpecName: "kube-api-access-bt5kj") pod "b51f3803-021a-45cc-a967-4ad902df39f2" (UID: "b51f3803-021a-45cc-a967-4ad902df39f2"). InnerVolumeSpecName "kube-api-access-bt5kj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.104647 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/072f1a7f-3b2a-421c-a000-1bb398725d35-kube-api-access-fvkff" (OuterVolumeSpecName: "kube-api-access-fvkff") pod "072f1a7f-3b2a-421c-a000-1bb398725d35" (UID: "072f1a7f-3b2a-421c-a000-1bb398725d35"). InnerVolumeSpecName "kube-api-access-fvkff". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.104700 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27e3e955-071a-41fe-a6b1-bb6e6bbd0426-kube-api-access-swsqf" (OuterVolumeSpecName: "kube-api-access-swsqf") pod "27e3e955-071a-41fe-a6b1-bb6e6bbd0426" (UID: "27e3e955-071a-41fe-a6b1-bb6e6bbd0426"). InnerVolumeSpecName "kube-api-access-swsqf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.199185 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-j5jrs"] Dec 01 06:56:51 crc kubenswrapper[4632]: E1201 06:56:51.199604 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27e3e955-071a-41fe-a6b1-bb6e6bbd0426" containerName="mariadb-account-create-update" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.199623 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="27e3e955-071a-41fe-a6b1-bb6e6bbd0426" containerName="mariadb-account-create-update" Dec 01 06:56:51 crc kubenswrapper[4632]: E1201 06:56:51.199655 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="072f1a7f-3b2a-421c-a000-1bb398725d35" containerName="mariadb-database-create" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.199662 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="072f1a7f-3b2a-421c-a000-1bb398725d35" containerName="mariadb-database-create" Dec 01 06:56:51 crc kubenswrapper[4632]: E1201 06:56:51.199671 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b51f3803-021a-45cc-a967-4ad902df39f2" containerName="mariadb-database-create" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.199679 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="b51f3803-021a-45cc-a967-4ad902df39f2" containerName="mariadb-database-create" Dec 01 06:56:51 crc kubenswrapper[4632]: E1201 06:56:51.199693 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14a44dd2-610d-4a19-8301-08aa47df8192" containerName="mariadb-account-create-update" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.199699 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="14a44dd2-610d-4a19-8301-08aa47df8192" containerName="mariadb-account-create-update" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.199842 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="27e3e955-071a-41fe-a6b1-bb6e6bbd0426" containerName="mariadb-account-create-update" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.199869 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="b51f3803-021a-45cc-a967-4ad902df39f2" containerName="mariadb-database-create" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.199885 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="072f1a7f-3b2a-421c-a000-1bb398725d35" containerName="mariadb-database-create" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.199898 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="14a44dd2-610d-4a19-8301-08aa47df8192" containerName="mariadb-account-create-update" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.200507 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-j5jrs" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.202338 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.202980 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.203941 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fvkff\" (UniqueName: \"kubernetes.io/projected/072f1a7f-3b2a-421c-a000-1bb398725d35-kube-api-access-fvkff\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.203973 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bt5kj\" (UniqueName: \"kubernetes.io/projected/b51f3803-021a-45cc-a967-4ad902df39f2-kube-api-access-bt5kj\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.203983 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swsqf\" (UniqueName: \"kubernetes.io/projected/27e3e955-071a-41fe-a6b1-bb6e6bbd0426-kube-api-access-swsqf\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.204872 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.208319 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-j5jrs"] Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.305601 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6efb3189-8101-4364-93b9-d31c87b9fe71-combined-ca-bundle\") pod \"swift-ring-rebalance-j5jrs\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") " pod="openstack/swift-ring-rebalance-j5jrs" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.305661 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6efb3189-8101-4364-93b9-d31c87b9fe71-ring-data-devices\") pod \"swift-ring-rebalance-j5jrs\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") " pod="openstack/swift-ring-rebalance-j5jrs" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.305780 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6efb3189-8101-4364-93b9-d31c87b9fe71-swiftconf\") pod \"swift-ring-rebalance-j5jrs\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") " pod="openstack/swift-ring-rebalance-j5jrs" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.305886 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6efb3189-8101-4364-93b9-d31c87b9fe71-dispersionconf\") pod \"swift-ring-rebalance-j5jrs\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") " pod="openstack/swift-ring-rebalance-j5jrs" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.305908 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6efb3189-8101-4364-93b9-d31c87b9fe71-etc-swift\") pod \"swift-ring-rebalance-j5jrs\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") " 
pod="openstack/swift-ring-rebalance-j5jrs" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.306063 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p598w\" (UniqueName: \"kubernetes.io/projected/6efb3189-8101-4364-93b9-d31c87b9fe71-kube-api-access-p598w\") pod \"swift-ring-rebalance-j5jrs\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") " pod="openstack/swift-ring-rebalance-j5jrs" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.306110 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6efb3189-8101-4364-93b9-d31c87b9fe71-scripts\") pod \"swift-ring-rebalance-j5jrs\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") " pod="openstack/swift-ring-rebalance-j5jrs" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.408298 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6efb3189-8101-4364-93b9-d31c87b9fe71-swiftconf\") pod \"swift-ring-rebalance-j5jrs\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") " pod="openstack/swift-ring-rebalance-j5jrs" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.408463 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6efb3189-8101-4364-93b9-d31c87b9fe71-dispersionconf\") pod \"swift-ring-rebalance-j5jrs\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") " pod="openstack/swift-ring-rebalance-j5jrs" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.408492 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6efb3189-8101-4364-93b9-d31c87b9fe71-etc-swift\") pod \"swift-ring-rebalance-j5jrs\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") " pod="openstack/swift-ring-rebalance-j5jrs" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.408541 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p598w\" (UniqueName: \"kubernetes.io/projected/6efb3189-8101-4364-93b9-d31c87b9fe71-kube-api-access-p598w\") pod \"swift-ring-rebalance-j5jrs\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") " pod="openstack/swift-ring-rebalance-j5jrs" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.408567 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6efb3189-8101-4364-93b9-d31c87b9fe71-scripts\") pod \"swift-ring-rebalance-j5jrs\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") " pod="openstack/swift-ring-rebalance-j5jrs" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.408637 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6efb3189-8101-4364-93b9-d31c87b9fe71-combined-ca-bundle\") pod \"swift-ring-rebalance-j5jrs\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") " pod="openstack/swift-ring-rebalance-j5jrs" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.408677 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6efb3189-8101-4364-93b9-d31c87b9fe71-ring-data-devices\") pod \"swift-ring-rebalance-j5jrs\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") " pod="openstack/swift-ring-rebalance-j5jrs" Dec 01 06:56:51 crc 
kubenswrapper[4632]: I1201 06:56:51.409271 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6efb3189-8101-4364-93b9-d31c87b9fe71-etc-swift\") pod \"swift-ring-rebalance-j5jrs\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") " pod="openstack/swift-ring-rebalance-j5jrs" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.409824 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6efb3189-8101-4364-93b9-d31c87b9fe71-ring-data-devices\") pod \"swift-ring-rebalance-j5jrs\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") " pod="openstack/swift-ring-rebalance-j5jrs" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.410129 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6efb3189-8101-4364-93b9-d31c87b9fe71-scripts\") pod \"swift-ring-rebalance-j5jrs\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") " pod="openstack/swift-ring-rebalance-j5jrs" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.412123 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6efb3189-8101-4364-93b9-d31c87b9fe71-combined-ca-bundle\") pod \"swift-ring-rebalance-j5jrs\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") " pod="openstack/swift-ring-rebalance-j5jrs" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.412148 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6efb3189-8101-4364-93b9-d31c87b9fe71-swiftconf\") pod \"swift-ring-rebalance-j5jrs\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") " pod="openstack/swift-ring-rebalance-j5jrs" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.412707 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6efb3189-8101-4364-93b9-d31c87b9fe71-dispersionconf\") pod \"swift-ring-rebalance-j5jrs\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") " pod="openstack/swift-ring-rebalance-j5jrs" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.424678 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p598w\" (UniqueName: \"kubernetes.io/projected/6efb3189-8101-4364-93b9-d31c87b9fe71-kube-api-access-p598w\") pod \"swift-ring-rebalance-j5jrs\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") " pod="openstack/swift-ring-rebalance-j5jrs" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.455182 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-b211-account-create-update-sj67v" event={"ID":"27e3e955-071a-41fe-a6b1-bb6e6bbd0426","Type":"ContainerDied","Data":"1843e72418e57816645e85a4ce83c65823410aba1e8c8925cd6c2e897ae0ea5a"} Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.455214 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-b211-account-create-update-sj67v" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.455234 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1843e72418e57816645e85a4ce83c65823410aba1e8c8925cd6c2e897ae0ea5a" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.456862 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-lwxrf" event={"ID":"b51f3803-021a-45cc-a967-4ad902df39f2","Type":"ContainerDied","Data":"f0f84df5ad7e8cc0fdff0fea5e11f19538c529dd7bf08d0b1ee7c061a6ebd282"} Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.456921 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f0f84df5ad7e8cc0fdff0fea5e11f19538c529dd7bf08d0b1ee7c061a6ebd282" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.456878 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-lwxrf" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.458305 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-r4zxk" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.458374 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-r4zxk" event={"ID":"072f1a7f-3b2a-421c-a000-1bb398725d35","Type":"ContainerDied","Data":"95feb39914521d636f7b0f36e73fcc42d8b0d039c62db12b096006ac2f0327c6"} Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.458403 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95feb39914521d636f7b0f36e73fcc42d8b0d039c62db12b096006ac2f0327c6" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.459830 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-81a0-account-create-update-vb5bg" event={"ID":"14a44dd2-610d-4a19-8301-08aa47df8192","Type":"ContainerDied","Data":"046ff1f57901544aa3b88a0500a1af699d23db89f79627d5f22d816962d400ad"} Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.459901 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="046ff1f57901544aa3b88a0500a1af699d23db89f79627d5f22d816962d400ad" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.460159 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-81a0-account-create-update-vb5bg" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.461397 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b" event={"ID":"64843eea-4a56-4384-bafe-1f16a3ca66c9","Type":"ContainerStarted","Data":"9809ba73b2753cf1e2fea50db57cb2330f5b7ff6b6f796a223e64294f6d0991e"} Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.462047 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.510871 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b136b809-94ef-4a5e-86b6-d3652e7ce987-etc-swift\") pod \"swift-storage-0\" (UID: \"b136b809-94ef-4a5e-86b6-d3652e7ce987\") " pod="openstack/swift-storage-0" Dec 01 06:56:51 crc kubenswrapper[4632]: E1201 06:56:51.511020 4632 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 01 06:56:51 crc kubenswrapper[4632]: E1201 06:56:51.511047 4632 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 01 06:56:51 crc kubenswrapper[4632]: E1201 06:56:51.511102 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b136b809-94ef-4a5e-86b6-d3652e7ce987-etc-swift podName:b136b809-94ef-4a5e-86b6-d3652e7ce987 nodeName:}" failed. No retries permitted until 2025-12-01 06:56:52.511084214 +0000 UTC m=+822.076097187 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b136b809-94ef-4a5e-86b6-d3652e7ce987-etc-swift") pod "swift-storage-0" (UID: "b136b809-94ef-4a5e-86b6-d3652e7ce987") : configmap "swift-ring-files" not found Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.517963 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-j5jrs" Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.828799 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b" podStartSLOduration=2.828781043 podStartE2EDuration="2.828781043s" podCreationTimestamp="2025-12-01 06:56:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:56:51.498337672 +0000 UTC m=+821.063350645" watchObservedRunningTime="2025-12-01 06:56:51.828781043 +0000 UTC m=+821.393794016" Dec 01 06:56:51 crc kubenswrapper[4632]: W1201 06:56:51.930344 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6efb3189_8101_4364_93b9_d31c87b9fe71.slice/crio-d265b39b3def396c846aeccdc245b1195114e8375761fddc687cd053a9adbc6c WatchSource:0}: Error finding container d265b39b3def396c846aeccdc245b1195114e8375761fddc687cd053a9adbc6c: Status 404 returned error can't find the container with id d265b39b3def396c846aeccdc245b1195114e8375761fddc687cd053a9adbc6c Dec 01 06:56:51 crc kubenswrapper[4632]: I1201 06:56:51.933113 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-j5jrs"] Dec 01 06:56:52 crc kubenswrapper[4632]: I1201 06:56:52.470744 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-j5jrs" event={"ID":"6efb3189-8101-4364-93b9-d31c87b9fe71","Type":"ContainerStarted","Data":"d265b39b3def396c846aeccdc245b1195114e8375761fddc687cd053a9adbc6c"} Dec 01 06:56:52 crc kubenswrapper[4632]: I1201 06:56:52.528739 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b136b809-94ef-4a5e-86b6-d3652e7ce987-etc-swift\") pod \"swift-storage-0\" (UID: \"b136b809-94ef-4a5e-86b6-d3652e7ce987\") " pod="openstack/swift-storage-0" Dec 01 06:56:52 crc kubenswrapper[4632]: E1201 06:56:52.529613 4632 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 01 06:56:52 crc kubenswrapper[4632]: E1201 06:56:52.529640 4632 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 01 06:56:52 crc kubenswrapper[4632]: E1201 06:56:52.529690 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b136b809-94ef-4a5e-86b6-d3652e7ce987-etc-swift podName:b136b809-94ef-4a5e-86b6-d3652e7ce987 nodeName:}" failed. No retries permitted until 2025-12-01 06:56:54.529674131 +0000 UTC m=+824.094687104 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b136b809-94ef-4a5e-86b6-d3652e7ce987-etc-swift") pod "swift-storage-0" (UID: "b136b809-94ef-4a5e-86b6-d3652e7ce987") : configmap "swift-ring-files" not found Dec 01 06:56:53 crc kubenswrapper[4632]: I1201 06:56:53.093220 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-8p84f"] Dec 01 06:56:53 crc kubenswrapper[4632]: I1201 06:56:53.094252 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-8p84f" Dec 01 06:56:53 crc kubenswrapper[4632]: I1201 06:56:53.100223 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-8p84f"] Dec 01 06:56:53 crc kubenswrapper[4632]: I1201 06:56:53.139130 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b5d5f398-de6c-4cc8-8f29-44ea946e025c-operator-scripts\") pod \"glance-db-create-8p84f\" (UID: \"b5d5f398-de6c-4cc8-8f29-44ea946e025c\") " pod="openstack/glance-db-create-8p84f" Dec 01 06:56:53 crc kubenswrapper[4632]: I1201 06:56:53.139191 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmpq7\" (UniqueName: \"kubernetes.io/projected/b5d5f398-de6c-4cc8-8f29-44ea946e025c-kube-api-access-qmpq7\") pod \"glance-db-create-8p84f\" (UID: \"b5d5f398-de6c-4cc8-8f29-44ea946e025c\") " pod="openstack/glance-db-create-8p84f" Dec 01 06:56:53 crc kubenswrapper[4632]: I1201 06:56:53.198609 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-85a5-account-create-update-kglxb"] Dec 01 06:56:53 crc kubenswrapper[4632]: I1201 06:56:53.199814 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-85a5-account-create-update-kglxb" Dec 01 06:56:53 crc kubenswrapper[4632]: I1201 06:56:53.204074 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 01 06:56:53 crc kubenswrapper[4632]: I1201 06:56:53.205876 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-85a5-account-create-update-kglxb"] Dec 01 06:56:53 crc kubenswrapper[4632]: I1201 06:56:53.241254 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32c84236-bf94-47e0-afab-4b0ce0c0bf36-operator-scripts\") pod \"glance-85a5-account-create-update-kglxb\" (UID: \"32c84236-bf94-47e0-afab-4b0ce0c0bf36\") " pod="openstack/glance-85a5-account-create-update-kglxb" Dec 01 06:56:53 crc kubenswrapper[4632]: I1201 06:56:53.241334 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b5d5f398-de6c-4cc8-8f29-44ea946e025c-operator-scripts\") pod \"glance-db-create-8p84f\" (UID: \"b5d5f398-de6c-4cc8-8f29-44ea946e025c\") " pod="openstack/glance-db-create-8p84f" Dec 01 06:56:53 crc kubenswrapper[4632]: I1201 06:56:53.241397 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmpq7\" (UniqueName: \"kubernetes.io/projected/b5d5f398-de6c-4cc8-8f29-44ea946e025c-kube-api-access-qmpq7\") pod \"glance-db-create-8p84f\" (UID: \"b5d5f398-de6c-4cc8-8f29-44ea946e025c\") " pod="openstack/glance-db-create-8p84f" Dec 01 06:56:53 crc kubenswrapper[4632]: I1201 06:56:53.241459 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwthc\" (UniqueName: \"kubernetes.io/projected/32c84236-bf94-47e0-afab-4b0ce0c0bf36-kube-api-access-dwthc\") pod \"glance-85a5-account-create-update-kglxb\" (UID: \"32c84236-bf94-47e0-afab-4b0ce0c0bf36\") " pod="openstack/glance-85a5-account-create-update-kglxb" Dec 01 06:56:53 crc kubenswrapper[4632]: I1201 06:56:53.242221 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/b5d5f398-de6c-4cc8-8f29-44ea946e025c-operator-scripts\") pod \"glance-db-create-8p84f\" (UID: \"b5d5f398-de6c-4cc8-8f29-44ea946e025c\") " pod="openstack/glance-db-create-8p84f" Dec 01 06:56:53 crc kubenswrapper[4632]: I1201 06:56:53.259045 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmpq7\" (UniqueName: \"kubernetes.io/projected/b5d5f398-de6c-4cc8-8f29-44ea946e025c-kube-api-access-qmpq7\") pod \"glance-db-create-8p84f\" (UID: \"b5d5f398-de6c-4cc8-8f29-44ea946e025c\") " pod="openstack/glance-db-create-8p84f" Dec 01 06:56:53 crc kubenswrapper[4632]: I1201 06:56:53.341960 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwthc\" (UniqueName: \"kubernetes.io/projected/32c84236-bf94-47e0-afab-4b0ce0c0bf36-kube-api-access-dwthc\") pod \"glance-85a5-account-create-update-kglxb\" (UID: \"32c84236-bf94-47e0-afab-4b0ce0c0bf36\") " pod="openstack/glance-85a5-account-create-update-kglxb" Dec 01 06:56:53 crc kubenswrapper[4632]: I1201 06:56:53.342272 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32c84236-bf94-47e0-afab-4b0ce0c0bf36-operator-scripts\") pod \"glance-85a5-account-create-update-kglxb\" (UID: \"32c84236-bf94-47e0-afab-4b0ce0c0bf36\") " pod="openstack/glance-85a5-account-create-update-kglxb" Dec 01 06:56:53 crc kubenswrapper[4632]: I1201 06:56:53.342849 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32c84236-bf94-47e0-afab-4b0ce0c0bf36-operator-scripts\") pod \"glance-85a5-account-create-update-kglxb\" (UID: \"32c84236-bf94-47e0-afab-4b0ce0c0bf36\") " pod="openstack/glance-85a5-account-create-update-kglxb" Dec 01 06:56:53 crc kubenswrapper[4632]: I1201 06:56:53.356632 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwthc\" (UniqueName: \"kubernetes.io/projected/32c84236-bf94-47e0-afab-4b0ce0c0bf36-kube-api-access-dwthc\") pod \"glance-85a5-account-create-update-kglxb\" (UID: \"32c84236-bf94-47e0-afab-4b0ce0c0bf36\") " pod="openstack/glance-85a5-account-create-update-kglxb" Dec 01 06:56:53 crc kubenswrapper[4632]: I1201 06:56:53.419545 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-8p84f" Dec 01 06:56:53 crc kubenswrapper[4632]: I1201 06:56:53.515943 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-85a5-account-create-update-kglxb" Dec 01 06:56:53 crc kubenswrapper[4632]: I1201 06:56:53.816297 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-8p84f"] Dec 01 06:56:53 crc kubenswrapper[4632]: W1201 06:56:53.820395 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb5d5f398_de6c_4cc8_8f29_44ea946e025c.slice/crio-17fa22317de842bfdec976ff27088ca75245f632bb8616c0de4bbfd92b47a479 WatchSource:0}: Error finding container 17fa22317de842bfdec976ff27088ca75245f632bb8616c0de4bbfd92b47a479: Status 404 returned error can't find the container with id 17fa22317de842bfdec976ff27088ca75245f632bb8616c0de4bbfd92b47a479 Dec 01 06:56:53 crc kubenswrapper[4632]: I1201 06:56:53.895717 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-85a5-account-create-update-kglxb"] Dec 01 06:56:53 crc kubenswrapper[4632]: W1201 06:56:53.909228 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32c84236_bf94_47e0_afab_4b0ce0c0bf36.slice/crio-cf9007cb35ba6f23e4b0e7672dfa56940286d309016e58217d313aae4abcb5ec WatchSource:0}: Error finding container cf9007cb35ba6f23e4b0e7672dfa56940286d309016e58217d313aae4abcb5ec: Status 404 returned error can't find the container with id cf9007cb35ba6f23e4b0e7672dfa56940286d309016e58217d313aae4abcb5ec Dec 01 06:56:54 crc kubenswrapper[4632]: I1201 06:56:54.486269 4632 generic.go:334] "Generic (PLEG): container finished" podID="32c84236-bf94-47e0-afab-4b0ce0c0bf36" containerID="6f3f88cf8a1e5bebeea6cb4ae9b816b4fd2d2cd8c044eb290c254aa60c8d2bc0" exitCode=0 Dec 01 06:56:54 crc kubenswrapper[4632]: I1201 06:56:54.486386 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-85a5-account-create-update-kglxb" event={"ID":"32c84236-bf94-47e0-afab-4b0ce0c0bf36","Type":"ContainerDied","Data":"6f3f88cf8a1e5bebeea6cb4ae9b816b4fd2d2cd8c044eb290c254aa60c8d2bc0"} Dec 01 06:56:54 crc kubenswrapper[4632]: I1201 06:56:54.486664 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-85a5-account-create-update-kglxb" event={"ID":"32c84236-bf94-47e0-afab-4b0ce0c0bf36","Type":"ContainerStarted","Data":"cf9007cb35ba6f23e4b0e7672dfa56940286d309016e58217d313aae4abcb5ec"} Dec 01 06:56:54 crc kubenswrapper[4632]: I1201 06:56:54.488709 4632 generic.go:334] "Generic (PLEG): container finished" podID="b5d5f398-de6c-4cc8-8f29-44ea946e025c" containerID="d7293d7e53876ba338027a03fb1f0b9c2de4692e1f777ce2d474e3b2d012d56c" exitCode=0 Dec 01 06:56:54 crc kubenswrapper[4632]: I1201 06:56:54.488795 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-8p84f" event={"ID":"b5d5f398-de6c-4cc8-8f29-44ea946e025c","Type":"ContainerDied","Data":"d7293d7e53876ba338027a03fb1f0b9c2de4692e1f777ce2d474e3b2d012d56c"} Dec 01 06:56:54 crc kubenswrapper[4632]: I1201 06:56:54.488827 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-8p84f" event={"ID":"b5d5f398-de6c-4cc8-8f29-44ea946e025c","Type":"ContainerStarted","Data":"17fa22317de842bfdec976ff27088ca75245f632bb8616c0de4bbfd92b47a479"} Dec 01 06:56:54 crc kubenswrapper[4632]: I1201 06:56:54.566086 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b136b809-94ef-4a5e-86b6-d3652e7ce987-etc-swift\") pod \"swift-storage-0\" (UID: 
\"b136b809-94ef-4a5e-86b6-d3652e7ce987\") " pod="openstack/swift-storage-0" Dec 01 06:56:54 crc kubenswrapper[4632]: E1201 06:56:54.566272 4632 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 01 06:56:54 crc kubenswrapper[4632]: E1201 06:56:54.566309 4632 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 01 06:56:54 crc kubenswrapper[4632]: E1201 06:56:54.566397 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b136b809-94ef-4a5e-86b6-d3652e7ce987-etc-swift podName:b136b809-94ef-4a5e-86b6-d3652e7ce987 nodeName:}" failed. No retries permitted until 2025-12-01 06:56:58.566376785 +0000 UTC m=+828.131389768 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b136b809-94ef-4a5e-86b6-d3652e7ce987-etc-swift") pod "swift-storage-0" (UID: "b136b809-94ef-4a5e-86b6-d3652e7ce987") : configmap "swift-ring-files" not found Dec 01 06:56:54 crc kubenswrapper[4632]: I1201 06:56:54.806769 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 01 06:56:56 crc kubenswrapper[4632]: I1201 06:56:55.825988 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-85a5-account-create-update-kglxb" Dec 01 06:56:56 crc kubenswrapper[4632]: I1201 06:56:55.831514 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-8p84f" Dec 01 06:56:56 crc kubenswrapper[4632]: I1201 06:56:55.892581 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmpq7\" (UniqueName: \"kubernetes.io/projected/b5d5f398-de6c-4cc8-8f29-44ea946e025c-kube-api-access-qmpq7\") pod \"b5d5f398-de6c-4cc8-8f29-44ea946e025c\" (UID: \"b5d5f398-de6c-4cc8-8f29-44ea946e025c\") " Dec 01 06:56:56 crc kubenswrapper[4632]: I1201 06:56:55.892703 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwthc\" (UniqueName: \"kubernetes.io/projected/32c84236-bf94-47e0-afab-4b0ce0c0bf36-kube-api-access-dwthc\") pod \"32c84236-bf94-47e0-afab-4b0ce0c0bf36\" (UID: \"32c84236-bf94-47e0-afab-4b0ce0c0bf36\") " Dec 01 06:56:56 crc kubenswrapper[4632]: I1201 06:56:55.892729 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b5d5f398-de6c-4cc8-8f29-44ea946e025c-operator-scripts\") pod \"b5d5f398-de6c-4cc8-8f29-44ea946e025c\" (UID: \"b5d5f398-de6c-4cc8-8f29-44ea946e025c\") " Dec 01 06:56:56 crc kubenswrapper[4632]: I1201 06:56:55.892859 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32c84236-bf94-47e0-afab-4b0ce0c0bf36-operator-scripts\") pod \"32c84236-bf94-47e0-afab-4b0ce0c0bf36\" (UID: \"32c84236-bf94-47e0-afab-4b0ce0c0bf36\") " Dec 01 06:56:56 crc kubenswrapper[4632]: I1201 06:56:55.893440 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b5d5f398-de6c-4cc8-8f29-44ea946e025c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b5d5f398-de6c-4cc8-8f29-44ea946e025c" (UID: "b5d5f398-de6c-4cc8-8f29-44ea946e025c"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:56:56 crc kubenswrapper[4632]: I1201 06:56:55.893442 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32c84236-bf94-47e0-afab-4b0ce0c0bf36-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "32c84236-bf94-47e0-afab-4b0ce0c0bf36" (UID: "32c84236-bf94-47e0-afab-4b0ce0c0bf36"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:56:56 crc kubenswrapper[4632]: I1201 06:56:55.898441 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32c84236-bf94-47e0-afab-4b0ce0c0bf36-kube-api-access-dwthc" (OuterVolumeSpecName: "kube-api-access-dwthc") pod "32c84236-bf94-47e0-afab-4b0ce0c0bf36" (UID: "32c84236-bf94-47e0-afab-4b0ce0c0bf36"). InnerVolumeSpecName "kube-api-access-dwthc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:56:56 crc kubenswrapper[4632]: I1201 06:56:55.899349 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5d5f398-de6c-4cc8-8f29-44ea946e025c-kube-api-access-qmpq7" (OuterVolumeSpecName: "kube-api-access-qmpq7") pod "b5d5f398-de6c-4cc8-8f29-44ea946e025c" (UID: "b5d5f398-de6c-4cc8-8f29-44ea946e025c"). InnerVolumeSpecName "kube-api-access-qmpq7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:56:56 crc kubenswrapper[4632]: I1201 06:56:55.995790 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmpq7\" (UniqueName: \"kubernetes.io/projected/b5d5f398-de6c-4cc8-8f29-44ea946e025c-kube-api-access-qmpq7\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:56 crc kubenswrapper[4632]: I1201 06:56:55.995842 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwthc\" (UniqueName: \"kubernetes.io/projected/32c84236-bf94-47e0-afab-4b0ce0c0bf36-kube-api-access-dwthc\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:56 crc kubenswrapper[4632]: I1201 06:56:55.995855 4632 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b5d5f398-de6c-4cc8-8f29-44ea946e025c-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:56 crc kubenswrapper[4632]: I1201 06:56:55.995879 4632 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32c84236-bf94-47e0-afab-4b0ce0c0bf36-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:56:56 crc kubenswrapper[4632]: I1201 06:56:56.511032 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-85a5-account-create-update-kglxb" event={"ID":"32c84236-bf94-47e0-afab-4b0ce0c0bf36","Type":"ContainerDied","Data":"cf9007cb35ba6f23e4b0e7672dfa56940286d309016e58217d313aae4abcb5ec"} Dec 01 06:56:56 crc kubenswrapper[4632]: I1201 06:56:56.511104 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cf9007cb35ba6f23e4b0e7672dfa56940286d309016e58217d313aae4abcb5ec" Dec 01 06:56:56 crc kubenswrapper[4632]: I1201 06:56:56.511055 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-85a5-account-create-update-kglxb" Dec 01 06:56:56 crc kubenswrapper[4632]: I1201 06:56:56.513757 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-8p84f" event={"ID":"b5d5f398-de6c-4cc8-8f29-44ea946e025c","Type":"ContainerDied","Data":"17fa22317de842bfdec976ff27088ca75245f632bb8616c0de4bbfd92b47a479"} Dec 01 06:56:56 crc kubenswrapper[4632]: I1201 06:56:56.513806 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="17fa22317de842bfdec976ff27088ca75245f632bb8616c0de4bbfd92b47a479" Dec 01 06:56:56 crc kubenswrapper[4632]: I1201 06:56:56.513830 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-8p84f" Dec 01 06:56:58 crc kubenswrapper[4632]: I1201 06:56:58.398595 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-984wj"] Dec 01 06:56:58 crc kubenswrapper[4632]: E1201 06:56:58.399190 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5d5f398-de6c-4cc8-8f29-44ea946e025c" containerName="mariadb-database-create" Dec 01 06:56:58 crc kubenswrapper[4632]: I1201 06:56:58.399205 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5d5f398-de6c-4cc8-8f29-44ea946e025c" containerName="mariadb-database-create" Dec 01 06:56:58 crc kubenswrapper[4632]: E1201 06:56:58.399224 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32c84236-bf94-47e0-afab-4b0ce0c0bf36" containerName="mariadb-account-create-update" Dec 01 06:56:58 crc kubenswrapper[4632]: I1201 06:56:58.399231 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="32c84236-bf94-47e0-afab-4b0ce0c0bf36" containerName="mariadb-account-create-update" Dec 01 06:56:58 crc kubenswrapper[4632]: I1201 06:56:58.399418 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="32c84236-bf94-47e0-afab-4b0ce0c0bf36" containerName="mariadb-account-create-update" Dec 01 06:56:58 crc kubenswrapper[4632]: I1201 06:56:58.399441 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5d5f398-de6c-4cc8-8f29-44ea946e025c" containerName="mariadb-database-create" Dec 01 06:56:58 crc kubenswrapper[4632]: I1201 06:56:58.399942 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-984wj" Dec 01 06:56:58 crc kubenswrapper[4632]: I1201 06:56:58.407687 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-984wj"] Dec 01 06:56:58 crc kubenswrapper[4632]: I1201 06:56:58.408414 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 01 06:56:58 crc kubenswrapper[4632]: I1201 06:56:58.408967 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-8f9x2" Dec 01 06:56:58 crc kubenswrapper[4632]: I1201 06:56:58.445025 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmnjj\" (UniqueName: \"kubernetes.io/projected/73583357-a8a2-4812-ba66-553cc7713bd4-kube-api-access-dmnjj\") pod \"glance-db-sync-984wj\" (UID: \"73583357-a8a2-4812-ba66-553cc7713bd4\") " pod="openstack/glance-db-sync-984wj" Dec 01 06:56:58 crc kubenswrapper[4632]: I1201 06:56:58.445131 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73583357-a8a2-4812-ba66-553cc7713bd4-combined-ca-bundle\") pod \"glance-db-sync-984wj\" (UID: \"73583357-a8a2-4812-ba66-553cc7713bd4\") " pod="openstack/glance-db-sync-984wj" Dec 01 06:56:58 crc kubenswrapper[4632]: I1201 06:56:58.445169 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73583357-a8a2-4812-ba66-553cc7713bd4-config-data\") pod \"glance-db-sync-984wj\" (UID: \"73583357-a8a2-4812-ba66-553cc7713bd4\") " pod="openstack/glance-db-sync-984wj" Dec 01 06:56:58 crc kubenswrapper[4632]: I1201 06:56:58.445197 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/73583357-a8a2-4812-ba66-553cc7713bd4-db-sync-config-data\") pod \"glance-db-sync-984wj\" (UID: \"73583357-a8a2-4812-ba66-553cc7713bd4\") " pod="openstack/glance-db-sync-984wj" Dec 01 06:56:58 crc kubenswrapper[4632]: I1201 06:56:58.534458 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-j5jrs" event={"ID":"6efb3189-8101-4364-93b9-d31c87b9fe71","Type":"ContainerStarted","Data":"e48f001bd3811e57758e6c12b46f1004cdc0b97ca61a539e32c308b6867d367e"} Dec 01 06:56:58 crc kubenswrapper[4632]: I1201 06:56:58.546661 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmnjj\" (UniqueName: \"kubernetes.io/projected/73583357-a8a2-4812-ba66-553cc7713bd4-kube-api-access-dmnjj\") pod \"glance-db-sync-984wj\" (UID: \"73583357-a8a2-4812-ba66-553cc7713bd4\") " pod="openstack/glance-db-sync-984wj" Dec 01 06:56:58 crc kubenswrapper[4632]: I1201 06:56:58.546730 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73583357-a8a2-4812-ba66-553cc7713bd4-combined-ca-bundle\") pod \"glance-db-sync-984wj\" (UID: \"73583357-a8a2-4812-ba66-553cc7713bd4\") " pod="openstack/glance-db-sync-984wj" Dec 01 06:56:58 crc kubenswrapper[4632]: I1201 06:56:58.546759 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73583357-a8a2-4812-ba66-553cc7713bd4-config-data\") pod \"glance-db-sync-984wj\" (UID: \"73583357-a8a2-4812-ba66-553cc7713bd4\") " 
pod="openstack/glance-db-sync-984wj" Dec 01 06:56:58 crc kubenswrapper[4632]: I1201 06:56:58.546780 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/73583357-a8a2-4812-ba66-553cc7713bd4-db-sync-config-data\") pod \"glance-db-sync-984wj\" (UID: \"73583357-a8a2-4812-ba66-553cc7713bd4\") " pod="openstack/glance-db-sync-984wj" Dec 01 06:56:58 crc kubenswrapper[4632]: I1201 06:56:58.549968 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-j5jrs" podStartSLOduration=1.8475792439999998 podStartE2EDuration="7.549952317s" podCreationTimestamp="2025-12-01 06:56:51 +0000 UTC" firstStartedPulling="2025-12-01 06:56:51.932640962 +0000 UTC m=+821.497653936" lastFinishedPulling="2025-12-01 06:56:57.635014035 +0000 UTC m=+827.200027009" observedRunningTime="2025-12-01 06:56:58.546721281 +0000 UTC m=+828.111734253" watchObservedRunningTime="2025-12-01 06:56:58.549952317 +0000 UTC m=+828.114965290" Dec 01 06:56:58 crc kubenswrapper[4632]: I1201 06:56:58.552218 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/73583357-a8a2-4812-ba66-553cc7713bd4-db-sync-config-data\") pod \"glance-db-sync-984wj\" (UID: \"73583357-a8a2-4812-ba66-553cc7713bd4\") " pod="openstack/glance-db-sync-984wj" Dec 01 06:56:58 crc kubenswrapper[4632]: I1201 06:56:58.552397 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73583357-a8a2-4812-ba66-553cc7713bd4-config-data\") pod \"glance-db-sync-984wj\" (UID: \"73583357-a8a2-4812-ba66-553cc7713bd4\") " pod="openstack/glance-db-sync-984wj" Dec 01 06:56:58 crc kubenswrapper[4632]: I1201 06:56:58.553348 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73583357-a8a2-4812-ba66-553cc7713bd4-combined-ca-bundle\") pod \"glance-db-sync-984wj\" (UID: \"73583357-a8a2-4812-ba66-553cc7713bd4\") " pod="openstack/glance-db-sync-984wj" Dec 01 06:56:58 crc kubenswrapper[4632]: I1201 06:56:58.562803 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmnjj\" (UniqueName: \"kubernetes.io/projected/73583357-a8a2-4812-ba66-553cc7713bd4-kube-api-access-dmnjj\") pod \"glance-db-sync-984wj\" (UID: \"73583357-a8a2-4812-ba66-553cc7713bd4\") " pod="openstack/glance-db-sync-984wj" Dec 01 06:56:58 crc kubenswrapper[4632]: I1201 06:56:58.648368 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b136b809-94ef-4a5e-86b6-d3652e7ce987-etc-swift\") pod \"swift-storage-0\" (UID: \"b136b809-94ef-4a5e-86b6-d3652e7ce987\") " pod="openstack/swift-storage-0" Dec 01 06:56:58 crc kubenswrapper[4632]: E1201 06:56:58.648621 4632 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 01 06:56:58 crc kubenswrapper[4632]: E1201 06:56:58.648650 4632 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 01 06:56:58 crc kubenswrapper[4632]: E1201 06:56:58.648718 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b136b809-94ef-4a5e-86b6-d3652e7ce987-etc-swift podName:b136b809-94ef-4a5e-86b6-d3652e7ce987 nodeName:}" failed. 
Dec 01 06:56:58 crc kubenswrapper[4632]: E1201 06:56:58.648718 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b136b809-94ef-4a5e-86b6-d3652e7ce987-etc-swift podName:b136b809-94ef-4a5e-86b6-d3652e7ce987 nodeName:}" failed. No retries permitted until 2025-12-01 06:57:06.648698186 +0000 UTC m=+836.213711160 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/b136b809-94ef-4a5e-86b6-d3652e7ce987-etc-swift") pod "swift-storage-0" (UID: "b136b809-94ef-4a5e-86b6-d3652e7ce987") : configmap "swift-ring-files" not found
Dec 01 06:56:58 crc kubenswrapper[4632]: I1201 06:56:58.715061 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-984wj"
Dec 01 06:56:59 crc kubenswrapper[4632]: I1201 06:56:59.173809 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-984wj"]
Dec 01 06:56:59 crc kubenswrapper[4632]: I1201 06:56:59.542893 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-984wj" event={"ID":"73583357-a8a2-4812-ba66-553cc7713bd4","Type":"ContainerStarted","Data":"250c0d09407196f00d25b1a72014fa5a4fd2b48b357e925cbaa209b3b072fe50"}
Dec 01 06:56:59 crc kubenswrapper[4632]: I1201 06:56:59.878290 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b"
Dec 01 06:56:59 crc kubenswrapper[4632]: I1201 06:56:59.930666 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cf6bcbc4c-kpftc"]
Dec 01 06:56:59 crc kubenswrapper[4632]: I1201 06:56:59.930959 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc" podUID="c4b8c416-341f-4a74-bd75-7901f5dfea22" containerName="dnsmasq-dns" containerID="cri-o://fc0607eb5aaea146e530d8b372f4ef314663e6068a3ffecf32148fb1bedebf66" gracePeriod=10
Dec 01 06:57:00 crc kubenswrapper[4632]: I1201 06:57:00.552341 4632 generic.go:334] "Generic (PLEG): container finished" podID="c4b8c416-341f-4a74-bd75-7901f5dfea22" containerID="fc0607eb5aaea146e530d8b372f4ef314663e6068a3ffecf32148fb1bedebf66" exitCode=0
Dec 01 06:57:00 crc kubenswrapper[4632]: I1201 06:57:00.552402 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc" event={"ID":"c4b8c416-341f-4a74-bd75-7901f5dfea22","Type":"ContainerDied","Data":"fc0607eb5aaea146e530d8b372f4ef314663e6068a3ffecf32148fb1bedebf66"}
Dec 01 06:57:00 crc kubenswrapper[4632]: I1201 06:57:00.992932 4632 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc" podUID="c4b8c416-341f-4a74-bd75-7901f5dfea22" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.106:5353: connect: connection refused"
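This block is an ordinary handover, not a fault: the replacement dnsmasq pod went ready at 06:56:59.878, the old dnsmasq-dns-cf6bcbc4c-kpftc was deleted and its container killed with a 10-second grace period, and the readiness-probe failure that follows (connection refused on 10.217.0.106:5353) is just the prober observing the dying container while traffic drains. The "dial tcp" wording suggests a TCP-style check; a sketch of an equivalent probe in Go, noting that the pod's actual probe definition and timeout are not in this log and are assumed here:

package main

import (
	"fmt"
	"net"
	"time"
)

// probeTCP mimics a TCP readiness check against the pod IP:port seen in the
// failure output above: the probe passes if the port accepts a connection.
func probeTCP(addr string) error {
	conn, err := net.DialTimeout("tcp", addr, time.Second)
	if err != nil {
		// A stopped listener yields exactly the log's error shape, e.g.
		// "dial tcp 10.217.0.106:5353: connect: connection refused".
		return err
	}
	return conn.Close()
}

func main() {
	if err := probeTCP("10.217.0.106:5353"); err != nil {
		fmt.Println("Probe failed:", err)
	}
}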
Need to start a new one" pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc" Dec 01 06:57:02 crc kubenswrapper[4632]: I1201 06:57:02.418417 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zlz8b\" (UniqueName: \"kubernetes.io/projected/c4b8c416-341f-4a74-bd75-7901f5dfea22-kube-api-access-zlz8b\") pod \"c4b8c416-341f-4a74-bd75-7901f5dfea22\" (UID: \"c4b8c416-341f-4a74-bd75-7901f5dfea22\") " Dec 01 06:57:02 crc kubenswrapper[4632]: I1201 06:57:02.418510 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4b8c416-341f-4a74-bd75-7901f5dfea22-dns-svc\") pod \"c4b8c416-341f-4a74-bd75-7901f5dfea22\" (UID: \"c4b8c416-341f-4a74-bd75-7901f5dfea22\") " Dec 01 06:57:02 crc kubenswrapper[4632]: I1201 06:57:02.418540 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4b8c416-341f-4a74-bd75-7901f5dfea22-ovsdbserver-nb\") pod \"c4b8c416-341f-4a74-bd75-7901f5dfea22\" (UID: \"c4b8c416-341f-4a74-bd75-7901f5dfea22\") " Dec 01 06:57:02 crc kubenswrapper[4632]: I1201 06:57:02.418610 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4b8c416-341f-4a74-bd75-7901f5dfea22-ovsdbserver-sb\") pod \"c4b8c416-341f-4a74-bd75-7901f5dfea22\" (UID: \"c4b8c416-341f-4a74-bd75-7901f5dfea22\") " Dec 01 06:57:02 crc kubenswrapper[4632]: I1201 06:57:02.418634 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4b8c416-341f-4a74-bd75-7901f5dfea22-config\") pod \"c4b8c416-341f-4a74-bd75-7901f5dfea22\" (UID: \"c4b8c416-341f-4a74-bd75-7901f5dfea22\") " Dec 01 06:57:02 crc kubenswrapper[4632]: I1201 06:57:02.425014 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4b8c416-341f-4a74-bd75-7901f5dfea22-kube-api-access-zlz8b" (OuterVolumeSpecName: "kube-api-access-zlz8b") pod "c4b8c416-341f-4a74-bd75-7901f5dfea22" (UID: "c4b8c416-341f-4a74-bd75-7901f5dfea22"). InnerVolumeSpecName "kube-api-access-zlz8b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:57:02 crc kubenswrapper[4632]: I1201 06:57:02.450589 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4b8c416-341f-4a74-bd75-7901f5dfea22-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c4b8c416-341f-4a74-bd75-7901f5dfea22" (UID: "c4b8c416-341f-4a74-bd75-7901f5dfea22"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:02 crc kubenswrapper[4632]: I1201 06:57:02.451867 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4b8c416-341f-4a74-bd75-7901f5dfea22-config" (OuterVolumeSpecName: "config") pod "c4b8c416-341f-4a74-bd75-7901f5dfea22" (UID: "c4b8c416-341f-4a74-bd75-7901f5dfea22"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:02 crc kubenswrapper[4632]: I1201 06:57:02.455053 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4b8c416-341f-4a74-bd75-7901f5dfea22-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c4b8c416-341f-4a74-bd75-7901f5dfea22" (UID: "c4b8c416-341f-4a74-bd75-7901f5dfea22"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:02 crc kubenswrapper[4632]: I1201 06:57:02.474296 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4b8c416-341f-4a74-bd75-7901f5dfea22-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c4b8c416-341f-4a74-bd75-7901f5dfea22" (UID: "c4b8c416-341f-4a74-bd75-7901f5dfea22"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:02 crc kubenswrapper[4632]: I1201 06:57:02.520534 4632 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c4b8c416-341f-4a74-bd75-7901f5dfea22-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:02 crc kubenswrapper[4632]: I1201 06:57:02.520564 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c4b8c416-341f-4a74-bd75-7901f5dfea22-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:02 crc kubenswrapper[4632]: I1201 06:57:02.520575 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zlz8b\" (UniqueName: \"kubernetes.io/projected/c4b8c416-341f-4a74-bd75-7901f5dfea22-kube-api-access-zlz8b\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:02 crc kubenswrapper[4632]: I1201 06:57:02.520599 4632 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c4b8c416-341f-4a74-bd75-7901f5dfea22-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:02 crc kubenswrapper[4632]: I1201 06:57:02.520608 4632 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c4b8c416-341f-4a74-bd75-7901f5dfea22-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:02 crc kubenswrapper[4632]: I1201 06:57:02.567587 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc" event={"ID":"c4b8c416-341f-4a74-bd75-7901f5dfea22","Type":"ContainerDied","Data":"380d867934aefc1572b2aa3a97c63bb853a5529b948ab9d314dc02fd4ce86452"} Dec 01 06:57:02 crc kubenswrapper[4632]: I1201 06:57:02.567647 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cf6bcbc4c-kpftc" Dec 01 06:57:02 crc kubenswrapper[4632]: I1201 06:57:02.567657 4632 scope.go:117] "RemoveContainer" containerID="fc0607eb5aaea146e530d8b372f4ef314663e6068a3ffecf32148fb1bedebf66" Dec 01 06:57:02 crc kubenswrapper[4632]: I1201 06:57:02.592788 4632 scope.go:117] "RemoveContainer" containerID="a6d11d9047346eff3762238e870537db97e4caf8ece4dfdc0633237da114cf59" Dec 01 06:57:02 crc kubenswrapper[4632]: I1201 06:57:02.599778 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-cf6bcbc4c-kpftc"] Dec 01 06:57:02 crc kubenswrapper[4632]: I1201 06:57:02.605577 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-cf6bcbc4c-kpftc"] Dec 01 06:57:02 crc kubenswrapper[4632]: I1201 06:57:02.760632 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4b8c416-341f-4a74-bd75-7901f5dfea22" path="/var/lib/kubelet/pods/c4b8c416-341f-4a74-bd75-7901f5dfea22/volumes" Dec 01 06:57:04 crc kubenswrapper[4632]: I1201 06:57:04.151644 4632 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-hw5x4" podUID="fe332539-435d-44e0-bcf5-c47332ed1e55" containerName="ovn-controller" probeResult="failure" output=< Dec 01 06:57:04 crc kubenswrapper[4632]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 01 06:57:04 crc kubenswrapper[4632]: > Dec 01 06:57:05 crc kubenswrapper[4632]: I1201 06:57:05.597272 4632 generic.go:334] "Generic (PLEG): container finished" podID="67043517-303b-4159-a030-1192c39b98dd" containerID="9b9c7b2f6b6d2cb3fc2cb5af9ba4600c190b2153eb2f1f44d58f42a457c11540" exitCode=0 Dec 01 06:57:05 crc kubenswrapper[4632]: I1201 06:57:05.597391 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"67043517-303b-4159-a030-1192c39b98dd","Type":"ContainerDied","Data":"9b9c7b2f6b6d2cb3fc2cb5af9ba4600c190b2153eb2f1f44d58f42a457c11540"} Dec 01 06:57:05 crc kubenswrapper[4632]: I1201 06:57:05.600219 4632 generic.go:334] "Generic (PLEG): container finished" podID="ff6e2f27-a2b5-4046-8e0e-dc495271a359" containerID="9dc2cfc06da8b761d9017a11b5369af0ede286129a1012ceeccb6b18a2e91a0d" exitCode=0 Dec 01 06:57:05 crc kubenswrapper[4632]: I1201 06:57:05.600319 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ff6e2f27-a2b5-4046-8e0e-dc495271a359","Type":"ContainerDied","Data":"9dc2cfc06da8b761d9017a11b5369af0ede286129a1012ceeccb6b18a2e91a0d"} Dec 01 06:57:05 crc kubenswrapper[4632]: I1201 06:57:05.604312 4632 generic.go:334] "Generic (PLEG): container finished" podID="6efb3189-8101-4364-93b9-d31c87b9fe71" containerID="e48f001bd3811e57758e6c12b46f1004cdc0b97ca61a539e32c308b6867d367e" exitCode=0 Dec 01 06:57:05 crc kubenswrapper[4632]: I1201 06:57:05.604387 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-j5jrs" event={"ID":"6efb3189-8101-4364-93b9-d31c87b9fe71","Type":"ContainerDied","Data":"e48f001bd3811e57758e6c12b46f1004cdc0b97ca61a539e32c308b6867d367e"} Dec 01 06:57:06 crc kubenswrapper[4632]: I1201 06:57:06.706636 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b136b809-94ef-4a5e-86b6-d3652e7ce987-etc-swift\") pod \"swift-storage-0\" (UID: \"b136b809-94ef-4a5e-86b6-d3652e7ce987\") " pod="openstack/swift-storage-0" Dec 01 06:57:06 crc kubenswrapper[4632]: I1201 06:57:06.712479 4632 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b136b809-94ef-4a5e-86b6-d3652e7ce987-etc-swift\") pod \"swift-storage-0\" (UID: \"b136b809-94ef-4a5e-86b6-d3652e7ce987\") " pod="openstack/swift-storage-0"
Dec 01 06:57:06 crc kubenswrapper[4632]: I1201 06:57:06.882157 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-j5jrs"
Dec 01 06:57:06 crc kubenswrapper[4632]: I1201 06:57:06.975795 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.011290 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6efb3189-8101-4364-93b9-d31c87b9fe71-etc-swift\") pod \"6efb3189-8101-4364-93b9-d31c87b9fe71\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") "
Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.011361 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6efb3189-8101-4364-93b9-d31c87b9fe71-scripts\") pod \"6efb3189-8101-4364-93b9-d31c87b9fe71\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") "
Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.011469 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6efb3189-8101-4364-93b9-d31c87b9fe71-combined-ca-bundle\") pod \"6efb3189-8101-4364-93b9-d31c87b9fe71\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") "
Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.011513 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p598w\" (UniqueName: \"kubernetes.io/projected/6efb3189-8101-4364-93b9-d31c87b9fe71-kube-api-access-p598w\") pod \"6efb3189-8101-4364-93b9-d31c87b9fe71\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") "
Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.011587 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6efb3189-8101-4364-93b9-d31c87b9fe71-ring-data-devices\") pod \"6efb3189-8101-4364-93b9-d31c87b9fe71\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") "
Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.011655 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6efb3189-8101-4364-93b9-d31c87b9fe71-dispersionconf\") pod \"6efb3189-8101-4364-93b9-d31c87b9fe71\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") "
Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.011735 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6efb3189-8101-4364-93b9-d31c87b9fe71-swiftconf\") pod \"6efb3189-8101-4364-93b9-d31c87b9fe71\" (UID: \"6efb3189-8101-4364-93b9-d31c87b9fe71\") "
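The mount that failed four times now succeeds on the retry scheduled by the 8s backoff: swift-ring-rebalance-j5jrs exited 0 at 06:57:05 and published the ring files, so when the operation re-runs at 06:57:06 the swift-ring-files ConfigMap exists and swift-storage-0 can finally get its sandbox. The observed schedule, reproduced as a small Go sketch; the 1s base delay and plain doubling are read off this log, while kubelet's actual backoff parameters and any cap are not shown here:

package main

import (
	"fmt"
	"time"
)

func main() {
	// First failure time, from the initial "No retries permitted until"
	// entry (2025-12-01 06:56:51.511084214 + 1s).
	t, _ := time.Parse(time.RFC3339Nano, "2025-12-01T06:56:51.511084214Z")
	delay := 1 * time.Second
	for i := 0; i < 4; i++ {
		fmt.Printf("retry #%d no earlier than %s (durationBeforeRetry %s)\n",
			i+1, t.Add(delay).Format("15:04:05"), delay)
		t = t.Add(delay) // approximation: the next failure lands at roughly the retry time
		delay *= 2       // observed in the log: 1s, 2s, 4s, 8s
	}
	// Prints 06:56:52, 06:56:54, 06:56:58, 06:57:06 -- matching the four
	// "No retries permitted until" entries, the last of which succeeds.
}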
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.012756 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6efb3189-8101-4364-93b9-d31c87b9fe71-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "6efb3189-8101-4364-93b9-d31c87b9fe71" (UID: "6efb3189-8101-4364-93b9-d31c87b9fe71"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.015590 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6efb3189-8101-4364-93b9-d31c87b9fe71-kube-api-access-p598w" (OuterVolumeSpecName: "kube-api-access-p598w") pod "6efb3189-8101-4364-93b9-d31c87b9fe71" (UID: "6efb3189-8101-4364-93b9-d31c87b9fe71"). InnerVolumeSpecName "kube-api-access-p598w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.021371 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6efb3189-8101-4364-93b9-d31c87b9fe71-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "6efb3189-8101-4364-93b9-d31c87b9fe71" (UID: "6efb3189-8101-4364-93b9-d31c87b9fe71"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.030439 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6efb3189-8101-4364-93b9-d31c87b9fe71-scripts" (OuterVolumeSpecName: "scripts") pod "6efb3189-8101-4364-93b9-d31c87b9fe71" (UID: "6efb3189-8101-4364-93b9-d31c87b9fe71"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.033718 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6efb3189-8101-4364-93b9-d31c87b9fe71-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6efb3189-8101-4364-93b9-d31c87b9fe71" (UID: "6efb3189-8101-4364-93b9-d31c87b9fe71"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.034253 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6efb3189-8101-4364-93b9-d31c87b9fe71-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "6efb3189-8101-4364-93b9-d31c87b9fe71" (UID: "6efb3189-8101-4364-93b9-d31c87b9fe71"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.114105 4632 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/6efb3189-8101-4364-93b9-d31c87b9fe71-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.114339 4632 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/6efb3189-8101-4364-93b9-d31c87b9fe71-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.114349 4632 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/6efb3189-8101-4364-93b9-d31c87b9fe71-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.114376 4632 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/6efb3189-8101-4364-93b9-d31c87b9fe71-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.114384 4632 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6efb3189-8101-4364-93b9-d31c87b9fe71-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.114393 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6efb3189-8101-4364-93b9-d31c87b9fe71-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.114402 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p598w\" (UniqueName: \"kubernetes.io/projected/6efb3189-8101-4364-93b9-d31c87b9fe71-kube-api-access-p598w\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.452505 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 01 06:57:07 crc kubenswrapper[4632]: W1201 06:57:07.459635 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb136b809_94ef_4a5e_86b6_d3652e7ce987.slice/crio-a26ef94750b463687b800dbee15cc1154ae5322497ceb91dd979fef540fd4f5c WatchSource:0}: Error finding container a26ef94750b463687b800dbee15cc1154ae5322497ceb91dd979fef540fd4f5c: Status 404 returned error can't find the container with id a26ef94750b463687b800dbee15cc1154ae5322497ceb91dd979fef540fd4f5c Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.628129 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"67043517-303b-4159-a030-1192c39b98dd","Type":"ContainerStarted","Data":"8fbdab565302cda55097719870e3fc369d5107311d572e57df688026b1c175a0"} Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.628368 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.631277 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b136b809-94ef-4a5e-86b6-d3652e7ce987","Type":"ContainerStarted","Data":"a26ef94750b463687b800dbee15cc1154ae5322497ceb91dd979fef540fd4f5c"} Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.633267 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" 
event={"ID":"ff6e2f27-a2b5-4046-8e0e-dc495271a359","Type":"ContainerStarted","Data":"66eb42a52fa597607f9038e2d80f7371efa6b20d3c1a584179ca0caa3e6d763b"} Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.633547 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.637321 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-j5jrs" event={"ID":"6efb3189-8101-4364-93b9-d31c87b9fe71","Type":"ContainerDied","Data":"d265b39b3def396c846aeccdc245b1195114e8375761fddc687cd053a9adbc6c"} Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.637380 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d265b39b3def396c846aeccdc245b1195114e8375761fddc687cd053a9adbc6c" Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.637397 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-j5jrs" Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.654181 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=38.565014507 podStartE2EDuration="1m4.654163854s" podCreationTimestamp="2025-12-01 06:56:03 +0000 UTC" firstStartedPulling="2025-12-01 06:56:05.168752034 +0000 UTC m=+774.733764997" lastFinishedPulling="2025-12-01 06:56:31.257901371 +0000 UTC m=+800.822914344" observedRunningTime="2025-12-01 06:57:07.651633529 +0000 UTC m=+837.216646503" watchObservedRunningTime="2025-12-01 06:57:07.654163854 +0000 UTC m=+837.219176828" Dec 01 06:57:07 crc kubenswrapper[4632]: I1201 06:57:07.684646 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.247441863 podStartE2EDuration="1m4.684631763s" podCreationTimestamp="2025-12-01 06:56:03 +0000 UTC" firstStartedPulling="2025-12-01 06:56:04.811322725 +0000 UTC m=+774.376335699" lastFinishedPulling="2025-12-01 06:56:32.248512627 +0000 UTC m=+801.813525599" observedRunningTime="2025-12-01 06:57:07.682042568 +0000 UTC m=+837.247055561" watchObservedRunningTime="2025-12-01 06:57:07.684631763 +0000 UTC m=+837.249644736" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.049195 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rf8lj"] Dec 01 06:57:09 crc kubenswrapper[4632]: E1201 06:57:09.050138 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4b8c416-341f-4a74-bd75-7901f5dfea22" containerName="dnsmasq-dns" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.050151 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4b8c416-341f-4a74-bd75-7901f5dfea22" containerName="dnsmasq-dns" Dec 01 06:57:09 crc kubenswrapper[4632]: E1201 06:57:09.050172 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4b8c416-341f-4a74-bd75-7901f5dfea22" containerName="init" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.050179 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4b8c416-341f-4a74-bd75-7901f5dfea22" containerName="init" Dec 01 06:57:09 crc kubenswrapper[4632]: E1201 06:57:09.050190 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6efb3189-8101-4364-93b9-d31c87b9fe71" containerName="swift-ring-rebalance" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.050196 4632 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="6efb3189-8101-4364-93b9-d31c87b9fe71" containerName="swift-ring-rebalance" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.050368 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4b8c416-341f-4a74-bd75-7901f5dfea22" containerName="dnsmasq-dns" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.050383 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="6efb3189-8101-4364-93b9-d31c87b9fe71" containerName="swift-ring-rebalance" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.051502 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rf8lj" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.056988 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rf8lj"] Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.145132 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd8203a0-6352-4188-9f57-1cfaacddd93a-utilities\") pod \"redhat-marketplace-rf8lj\" (UID: \"fd8203a0-6352-4188-9f57-1cfaacddd93a\") " pod="openshift-marketplace/redhat-marketplace-rf8lj" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.145182 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd8203a0-6352-4188-9f57-1cfaacddd93a-catalog-content\") pod \"redhat-marketplace-rf8lj\" (UID: \"fd8203a0-6352-4188-9f57-1cfaacddd93a\") " pod="openshift-marketplace/redhat-marketplace-rf8lj" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.145273 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tm2sf\" (UniqueName: \"kubernetes.io/projected/fd8203a0-6352-4188-9f57-1cfaacddd93a-kube-api-access-tm2sf\") pod \"redhat-marketplace-rf8lj\" (UID: \"fd8203a0-6352-4188-9f57-1cfaacddd93a\") " pod="openshift-marketplace/redhat-marketplace-rf8lj" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.153511 4632 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-hw5x4" podUID="fe332539-435d-44e0-bcf5-c47332ed1e55" containerName="ovn-controller" probeResult="failure" output=< Dec 01 06:57:09 crc kubenswrapper[4632]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 01 06:57:09 crc kubenswrapper[4632]: > Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.214988 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-rx6vw" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.220102 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-rx6vw" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.246798 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd8203a0-6352-4188-9f57-1cfaacddd93a-utilities\") pod \"redhat-marketplace-rf8lj\" (UID: \"fd8203a0-6352-4188-9f57-1cfaacddd93a\") " pod="openshift-marketplace/redhat-marketplace-rf8lj" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.246845 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd8203a0-6352-4188-9f57-1cfaacddd93a-catalog-content\") pod 
\"redhat-marketplace-rf8lj\" (UID: \"fd8203a0-6352-4188-9f57-1cfaacddd93a\") " pod="openshift-marketplace/redhat-marketplace-rf8lj" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.246915 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tm2sf\" (UniqueName: \"kubernetes.io/projected/fd8203a0-6352-4188-9f57-1cfaacddd93a-kube-api-access-tm2sf\") pod \"redhat-marketplace-rf8lj\" (UID: \"fd8203a0-6352-4188-9f57-1cfaacddd93a\") " pod="openshift-marketplace/redhat-marketplace-rf8lj" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.247816 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd8203a0-6352-4188-9f57-1cfaacddd93a-utilities\") pod \"redhat-marketplace-rf8lj\" (UID: \"fd8203a0-6352-4188-9f57-1cfaacddd93a\") " pod="openshift-marketplace/redhat-marketplace-rf8lj" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.248076 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd8203a0-6352-4188-9f57-1cfaacddd93a-catalog-content\") pod \"redhat-marketplace-rf8lj\" (UID: \"fd8203a0-6352-4188-9f57-1cfaacddd93a\") " pod="openshift-marketplace/redhat-marketplace-rf8lj" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.278933 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tm2sf\" (UniqueName: \"kubernetes.io/projected/fd8203a0-6352-4188-9f57-1cfaacddd93a-kube-api-access-tm2sf\") pod \"redhat-marketplace-rf8lj\" (UID: \"fd8203a0-6352-4188-9f57-1cfaacddd93a\") " pod="openshift-marketplace/redhat-marketplace-rf8lj" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.372370 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rf8lj" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.452701 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-hw5x4-config-4fpcn"] Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.454038 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-hw5x4-config-4fpcn" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.456397 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.470568 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-hw5x4-config-4fpcn"] Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.551584 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e791f91-5c8a-49b4-99a7-fb89280a6f26-scripts\") pod \"ovn-controller-hw5x4-config-4fpcn\" (UID: \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\") " pod="openstack/ovn-controller-hw5x4-config-4fpcn" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.551663 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6e791f91-5c8a-49b4-99a7-fb89280a6f26-var-run-ovn\") pod \"ovn-controller-hw5x4-config-4fpcn\" (UID: \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\") " pod="openstack/ovn-controller-hw5x4-config-4fpcn" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.551703 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6e791f91-5c8a-49b4-99a7-fb89280a6f26-var-run\") pod \"ovn-controller-hw5x4-config-4fpcn\" (UID: \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\") " pod="openstack/ovn-controller-hw5x4-config-4fpcn" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.551748 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6e791f91-5c8a-49b4-99a7-fb89280a6f26-var-log-ovn\") pod \"ovn-controller-hw5x4-config-4fpcn\" (UID: \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\") " pod="openstack/ovn-controller-hw5x4-config-4fpcn" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.551799 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4nq5\" (UniqueName: \"kubernetes.io/projected/6e791f91-5c8a-49b4-99a7-fb89280a6f26-kube-api-access-s4nq5\") pod \"ovn-controller-hw5x4-config-4fpcn\" (UID: \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\") " pod="openstack/ovn-controller-hw5x4-config-4fpcn" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.551861 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/6e791f91-5c8a-49b4-99a7-fb89280a6f26-additional-scripts\") pod \"ovn-controller-hw5x4-config-4fpcn\" (UID: \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\") " pod="openstack/ovn-controller-hw5x4-config-4fpcn" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.652885 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4nq5\" (UniqueName: \"kubernetes.io/projected/6e791f91-5c8a-49b4-99a7-fb89280a6f26-kube-api-access-s4nq5\") pod \"ovn-controller-hw5x4-config-4fpcn\" (UID: \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\") " pod="openstack/ovn-controller-hw5x4-config-4fpcn" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.652981 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: 
\"kubernetes.io/configmap/6e791f91-5c8a-49b4-99a7-fb89280a6f26-additional-scripts\") pod \"ovn-controller-hw5x4-config-4fpcn\" (UID: \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\") " pod="openstack/ovn-controller-hw5x4-config-4fpcn" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.653095 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e791f91-5c8a-49b4-99a7-fb89280a6f26-scripts\") pod \"ovn-controller-hw5x4-config-4fpcn\" (UID: \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\") " pod="openstack/ovn-controller-hw5x4-config-4fpcn" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.653153 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6e791f91-5c8a-49b4-99a7-fb89280a6f26-var-run-ovn\") pod \"ovn-controller-hw5x4-config-4fpcn\" (UID: \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\") " pod="openstack/ovn-controller-hw5x4-config-4fpcn" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.653186 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6e791f91-5c8a-49b4-99a7-fb89280a6f26-var-run\") pod \"ovn-controller-hw5x4-config-4fpcn\" (UID: \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\") " pod="openstack/ovn-controller-hw5x4-config-4fpcn" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.653225 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6e791f91-5c8a-49b4-99a7-fb89280a6f26-var-log-ovn\") pod \"ovn-controller-hw5x4-config-4fpcn\" (UID: \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\") " pod="openstack/ovn-controller-hw5x4-config-4fpcn" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.653416 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6e791f91-5c8a-49b4-99a7-fb89280a6f26-var-log-ovn\") pod \"ovn-controller-hw5x4-config-4fpcn\" (UID: \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\") " pod="openstack/ovn-controller-hw5x4-config-4fpcn" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.653424 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6e791f91-5c8a-49b4-99a7-fb89280a6f26-var-run-ovn\") pod \"ovn-controller-hw5x4-config-4fpcn\" (UID: \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\") " pod="openstack/ovn-controller-hw5x4-config-4fpcn" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.653475 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6e791f91-5c8a-49b4-99a7-fb89280a6f26-var-run\") pod \"ovn-controller-hw5x4-config-4fpcn\" (UID: \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\") " pod="openstack/ovn-controller-hw5x4-config-4fpcn" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.653882 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/6e791f91-5c8a-49b4-99a7-fb89280a6f26-additional-scripts\") pod \"ovn-controller-hw5x4-config-4fpcn\" (UID: \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\") " pod="openstack/ovn-controller-hw5x4-config-4fpcn" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.653884 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"b136b809-94ef-4a5e-86b6-d3652e7ce987","Type":"ContainerStarted","Data":"5e6c539f2a4c930eee050fd438752a789b1c8cb7155e2b4e228c762e8620bd13"} Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.655071 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e791f91-5c8a-49b4-99a7-fb89280a6f26-scripts\") pod \"ovn-controller-hw5x4-config-4fpcn\" (UID: \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\") " pod="openstack/ovn-controller-hw5x4-config-4fpcn" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.667005 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4nq5\" (UniqueName: \"kubernetes.io/projected/6e791f91-5c8a-49b4-99a7-fb89280a6f26-kube-api-access-s4nq5\") pod \"ovn-controller-hw5x4-config-4fpcn\" (UID: \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\") " pod="openstack/ovn-controller-hw5x4-config-4fpcn" Dec 01 06:57:09 crc kubenswrapper[4632]: I1201 06:57:09.769371 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-hw5x4-config-4fpcn" Dec 01 06:57:14 crc kubenswrapper[4632]: I1201 06:57:14.151717 4632 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-hw5x4" podUID="fe332539-435d-44e0-bcf5-c47332ed1e55" containerName="ovn-controller" probeResult="failure" output=< Dec 01 06:57:14 crc kubenswrapper[4632]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 01 06:57:14 crc kubenswrapper[4632]: > Dec 01 06:57:14 crc kubenswrapper[4632]: I1201 06:57:14.678324 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rf8lj"] Dec 01 06:57:14 crc kubenswrapper[4632]: I1201 06:57:14.699168 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b136b809-94ef-4a5e-86b6-d3652e7ce987","Type":"ContainerStarted","Data":"1b249c5a42880644664ca8c03ee14350d6190b666db2166404175ecaaaf637bb"} Dec 01 06:57:14 crc kubenswrapper[4632]: I1201 06:57:14.699221 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b136b809-94ef-4a5e-86b6-d3652e7ce987","Type":"ContainerStarted","Data":"dd3bf36660e4898c98c78aedf1bbabcd89d376046ae6c12b87a364baf7e3b759"} Dec 01 06:57:14 crc kubenswrapper[4632]: W1201 06:57:14.700902 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfd8203a0_6352_4188_9f57_1cfaacddd93a.slice/crio-6c21165d50a106c21e5123bf82a591ee098e11455a9b50711430e2ee2c41539d WatchSource:0}: Error finding container 6c21165d50a106c21e5123bf82a591ee098e11455a9b50711430e2ee2c41539d: Status 404 returned error can't find the container with id 6c21165d50a106c21e5123bf82a591ee098e11455a9b50711430e2ee2c41539d Dec 01 06:57:14 crc kubenswrapper[4632]: I1201 06:57:14.741863 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-hw5x4-config-4fpcn"] Dec 01 06:57:14 crc kubenswrapper[4632]: W1201 06:57:14.794333 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6e791f91_5c8a_49b4_99a7_fb89280a6f26.slice/crio-f805dc435d8f28287c7a705f57dd87eeb41a5ec1fcb1e3f4f42eb36ab504bd81 WatchSource:0}: Error finding container f805dc435d8f28287c7a705f57dd87eeb41a5ec1fcb1e3f4f42eb36ab504bd81: Status 404 returned error can't find the container with id 
f805dc435d8f28287c7a705f57dd87eeb41a5ec1fcb1e3f4f42eb36ab504bd81 Dec 01 06:57:15 crc kubenswrapper[4632]: I1201 06:57:15.709050 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b136b809-94ef-4a5e-86b6-d3652e7ce987","Type":"ContainerStarted","Data":"20fffc5f0d1c38d5431e89f7a9c2e013c963fda0047cf352a4f02829ccb85890"} Dec 01 06:57:15 crc kubenswrapper[4632]: I1201 06:57:15.710940 4632 generic.go:334] "Generic (PLEG): container finished" podID="6e791f91-5c8a-49b4-99a7-fb89280a6f26" containerID="bff8a43eed106d70912c61bd4195a94ee6f21d8c63aa0225faf8a293e87ba298" exitCode=0 Dec 01 06:57:15 crc kubenswrapper[4632]: I1201 06:57:15.710987 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hw5x4-config-4fpcn" event={"ID":"6e791f91-5c8a-49b4-99a7-fb89280a6f26","Type":"ContainerDied","Data":"bff8a43eed106d70912c61bd4195a94ee6f21d8c63aa0225faf8a293e87ba298"} Dec 01 06:57:15 crc kubenswrapper[4632]: I1201 06:57:15.711007 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hw5x4-config-4fpcn" event={"ID":"6e791f91-5c8a-49b4-99a7-fb89280a6f26","Type":"ContainerStarted","Data":"f805dc435d8f28287c7a705f57dd87eeb41a5ec1fcb1e3f4f42eb36ab504bd81"} Dec 01 06:57:15 crc kubenswrapper[4632]: I1201 06:57:15.712410 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-984wj" event={"ID":"73583357-a8a2-4812-ba66-553cc7713bd4","Type":"ContainerStarted","Data":"8cf22025785f727164843e0e6a640dfb8cd8e0bc6f92f660d17e053cba35c8fb"} Dec 01 06:57:15 crc kubenswrapper[4632]: I1201 06:57:15.714283 4632 generic.go:334] "Generic (PLEG): container finished" podID="fd8203a0-6352-4188-9f57-1cfaacddd93a" containerID="16816266aef49d6e0b2743432a622c1b85b262f1055d2e94a7a2b64560d6b9f9" exitCode=0 Dec 01 06:57:15 crc kubenswrapper[4632]: I1201 06:57:15.714342 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rf8lj" event={"ID":"fd8203a0-6352-4188-9f57-1cfaacddd93a","Type":"ContainerDied","Data":"16816266aef49d6e0b2743432a622c1b85b262f1055d2e94a7a2b64560d6b9f9"} Dec 01 06:57:15 crc kubenswrapper[4632]: I1201 06:57:15.714391 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rf8lj" event={"ID":"fd8203a0-6352-4188-9f57-1cfaacddd93a","Type":"ContainerStarted","Data":"6c21165d50a106c21e5123bf82a591ee098e11455a9b50711430e2ee2c41539d"} Dec 01 06:57:15 crc kubenswrapper[4632]: I1201 06:57:15.762304 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-984wj" podStartSLOduration=2.6017065 podStartE2EDuration="17.762282785s" podCreationTimestamp="2025-12-01 06:56:58 +0000 UTC" firstStartedPulling="2025-12-01 06:56:59.180898605 +0000 UTC m=+828.745911577" lastFinishedPulling="2025-12-01 06:57:14.341474888 +0000 UTC m=+843.906487862" observedRunningTime="2025-12-01 06:57:15.755384769 +0000 UTC m=+845.320397743" watchObservedRunningTime="2025-12-01 06:57:15.762282785 +0000 UTC m=+845.327295758" Dec 01 06:57:16 crc kubenswrapper[4632]: I1201 06:57:16.725967 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b136b809-94ef-4a5e-86b6-d3652e7ce987","Type":"ContainerStarted","Data":"00c2244732be1b6c97bee0ca7a9667f596a9c232fc107448579b2bbb53100e31"} Dec 01 06:57:16 crc kubenswrapper[4632]: I1201 06:57:16.726321 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"b136b809-94ef-4a5e-86b6-d3652e7ce987","Type":"ContainerStarted","Data":"bd3d4eaa26fc318a9991d290b7d906049369c536b9b195423ad82abd8f4b9157"} Dec 01 06:57:16 crc kubenswrapper[4632]: I1201 06:57:16.726336 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b136b809-94ef-4a5e-86b6-d3652e7ce987","Type":"ContainerStarted","Data":"7f5eb41f84fcb94daf9197be2c3bd22d8e77e7e2669652ccafe800438c2609b7"} Dec 01 06:57:16 crc kubenswrapper[4632]: I1201 06:57:16.972617 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-hw5x4-config-4fpcn" Dec 01 06:57:17 crc kubenswrapper[4632]: I1201 06:57:17.104588 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e791f91-5c8a-49b4-99a7-fb89280a6f26-scripts\") pod \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\" (UID: \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\") " Dec 01 06:57:17 crc kubenswrapper[4632]: I1201 06:57:17.104700 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6e791f91-5c8a-49b4-99a7-fb89280a6f26-var-run-ovn\") pod \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\" (UID: \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\") " Dec 01 06:57:17 crc kubenswrapper[4632]: I1201 06:57:17.104847 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6e791f91-5c8a-49b4-99a7-fb89280a6f26-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "6e791f91-5c8a-49b4-99a7-fb89280a6f26" (UID: "6e791f91-5c8a-49b4-99a7-fb89280a6f26"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:57:17 crc kubenswrapper[4632]: I1201 06:57:17.104938 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6e791f91-5c8a-49b4-99a7-fb89280a6f26-var-run\") pod \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\" (UID: \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\") " Dec 01 06:57:17 crc kubenswrapper[4632]: I1201 06:57:17.105020 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4nq5\" (UniqueName: \"kubernetes.io/projected/6e791f91-5c8a-49b4-99a7-fb89280a6f26-kube-api-access-s4nq5\") pod \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\" (UID: \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\") " Dec 01 06:57:17 crc kubenswrapper[4632]: I1201 06:57:17.105034 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6e791f91-5c8a-49b4-99a7-fb89280a6f26-var-run" (OuterVolumeSpecName: "var-run") pod "6e791f91-5c8a-49b4-99a7-fb89280a6f26" (UID: "6e791f91-5c8a-49b4-99a7-fb89280a6f26"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:57:17 crc kubenswrapper[4632]: I1201 06:57:17.105105 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6e791f91-5c8a-49b4-99a7-fb89280a6f26-var-log-ovn\") pod \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\" (UID: \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\") " Dec 01 06:57:17 crc kubenswrapper[4632]: I1201 06:57:17.105163 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6e791f91-5c8a-49b4-99a7-fb89280a6f26-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "6e791f91-5c8a-49b4-99a7-fb89280a6f26" (UID: "6e791f91-5c8a-49b4-99a7-fb89280a6f26"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:57:17 crc kubenswrapper[4632]: I1201 06:57:17.105168 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/6e791f91-5c8a-49b4-99a7-fb89280a6f26-additional-scripts\") pod \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\" (UID: \"6e791f91-5c8a-49b4-99a7-fb89280a6f26\") " Dec 01 06:57:17 crc kubenswrapper[4632]: I1201 06:57:17.105778 4632 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6e791f91-5c8a-49b4-99a7-fb89280a6f26-var-run\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:17 crc kubenswrapper[4632]: I1201 06:57:17.105800 4632 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6e791f91-5c8a-49b4-99a7-fb89280a6f26-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:17 crc kubenswrapper[4632]: I1201 06:57:17.105797 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e791f91-5c8a-49b4-99a7-fb89280a6f26-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "6e791f91-5c8a-49b4-99a7-fb89280a6f26" (UID: "6e791f91-5c8a-49b4-99a7-fb89280a6f26"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:17 crc kubenswrapper[4632]: I1201 06:57:17.105813 4632 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6e791f91-5c8a-49b4-99a7-fb89280a6f26-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:17 crc kubenswrapper[4632]: I1201 06:57:17.105914 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e791f91-5c8a-49b4-99a7-fb89280a6f26-scripts" (OuterVolumeSpecName: "scripts") pod "6e791f91-5c8a-49b4-99a7-fb89280a6f26" (UID: "6e791f91-5c8a-49b4-99a7-fb89280a6f26"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:17 crc kubenswrapper[4632]: I1201 06:57:17.114397 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e791f91-5c8a-49b4-99a7-fb89280a6f26-kube-api-access-s4nq5" (OuterVolumeSpecName: "kube-api-access-s4nq5") pod "6e791f91-5c8a-49b4-99a7-fb89280a6f26" (UID: "6e791f91-5c8a-49b4-99a7-fb89280a6f26"). InnerVolumeSpecName "kube-api-access-s4nq5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:57:17 crc kubenswrapper[4632]: I1201 06:57:17.207083 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4nq5\" (UniqueName: \"kubernetes.io/projected/6e791f91-5c8a-49b4-99a7-fb89280a6f26-kube-api-access-s4nq5\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:17 crc kubenswrapper[4632]: I1201 06:57:17.207115 4632 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/6e791f91-5c8a-49b4-99a7-fb89280a6f26-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:17 crc kubenswrapper[4632]: I1201 06:57:17.207125 4632 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e791f91-5c8a-49b4-99a7-fb89280a6f26-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:17 crc kubenswrapper[4632]: I1201 06:57:17.734143 4632 generic.go:334] "Generic (PLEG): container finished" podID="fd8203a0-6352-4188-9f57-1cfaacddd93a" containerID="5931bacc3d7cd6937eb9dbdc546ecb9d764b6d93035c4890e8c42a535dda4eeb" exitCode=0 Dec 01 06:57:17 crc kubenswrapper[4632]: I1201 06:57:17.734198 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rf8lj" event={"ID":"fd8203a0-6352-4188-9f57-1cfaacddd93a","Type":"ContainerDied","Data":"5931bacc3d7cd6937eb9dbdc546ecb9d764b6d93035c4890e8c42a535dda4eeb"} Dec 01 06:57:17 crc kubenswrapper[4632]: I1201 06:57:17.740298 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b136b809-94ef-4a5e-86b6-d3652e7ce987","Type":"ContainerStarted","Data":"8919584ff59feb4cc03e4f7077b4c6fb1390331a6239779b4321607a8bc1a715"} Dec 01 06:57:17 crc kubenswrapper[4632]: I1201 06:57:17.742552 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hw5x4-config-4fpcn" event={"ID":"6e791f91-5c8a-49b4-99a7-fb89280a6f26","Type":"ContainerDied","Data":"f805dc435d8f28287c7a705f57dd87eeb41a5ec1fcb1e3f4f42eb36ab504bd81"} Dec 01 06:57:17 crc kubenswrapper[4632]: I1201 06:57:17.742586 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f805dc435d8f28287c7a705f57dd87eeb41a5ec1fcb1e3f4f42eb36ab504bd81" Dec 01 06:57:17 crc kubenswrapper[4632]: I1201 06:57:17.742609 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-hw5x4-config-4fpcn" Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.061034 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-hw5x4-config-4fpcn"] Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.069320 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-hw5x4-config-4fpcn"] Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.165202 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-hw5x4-config-4n9q6"] Dec 01 06:57:18 crc kubenswrapper[4632]: E1201 06:57:18.165900 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e791f91-5c8a-49b4-99a7-fb89280a6f26" containerName="ovn-config" Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.165926 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e791f91-5c8a-49b4-99a7-fb89280a6f26" containerName="ovn-config" Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.166141 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e791f91-5c8a-49b4-99a7-fb89280a6f26" containerName="ovn-config" Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.167650 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-hw5x4-config-4n9q6" Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.170952 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.179730 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-hw5x4-config-4n9q6"] Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.225513 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9ac1caba-9d98-47fb-887d-165a64b2f5b5-additional-scripts\") pod \"ovn-controller-hw5x4-config-4n9q6\" (UID: \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\") " pod="openstack/ovn-controller-hw5x4-config-4n9q6" Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.225742 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9ac1caba-9d98-47fb-887d-165a64b2f5b5-scripts\") pod \"ovn-controller-hw5x4-config-4n9q6\" (UID: \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\") " pod="openstack/ovn-controller-hw5x4-config-4n9q6" Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.225860 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9ac1caba-9d98-47fb-887d-165a64b2f5b5-var-run\") pod \"ovn-controller-hw5x4-config-4n9q6\" (UID: \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\") " pod="openstack/ovn-controller-hw5x4-config-4n9q6" Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.225959 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9ac1caba-9d98-47fb-887d-165a64b2f5b5-var-run-ovn\") pod \"ovn-controller-hw5x4-config-4n9q6\" (UID: \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\") " pod="openstack/ovn-controller-hw5x4-config-4n9q6" Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.226008 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: 
\"kubernetes.io/host-path/9ac1caba-9d98-47fb-887d-165a64b2f5b5-var-log-ovn\") pod \"ovn-controller-hw5x4-config-4n9q6\" (UID: \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\") " pod="openstack/ovn-controller-hw5x4-config-4n9q6" Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.226122 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bvls\" (UniqueName: \"kubernetes.io/projected/9ac1caba-9d98-47fb-887d-165a64b2f5b5-kube-api-access-4bvls\") pod \"ovn-controller-hw5x4-config-4n9q6\" (UID: \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\") " pod="openstack/ovn-controller-hw5x4-config-4n9q6" Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.328226 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9ac1caba-9d98-47fb-887d-165a64b2f5b5-scripts\") pod \"ovn-controller-hw5x4-config-4n9q6\" (UID: \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\") " pod="openstack/ovn-controller-hw5x4-config-4n9q6" Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.328310 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9ac1caba-9d98-47fb-887d-165a64b2f5b5-var-run\") pod \"ovn-controller-hw5x4-config-4n9q6\" (UID: \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\") " pod="openstack/ovn-controller-hw5x4-config-4n9q6" Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.328377 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9ac1caba-9d98-47fb-887d-165a64b2f5b5-var-run-ovn\") pod \"ovn-controller-hw5x4-config-4n9q6\" (UID: \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\") " pod="openstack/ovn-controller-hw5x4-config-4n9q6" Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.328408 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9ac1caba-9d98-47fb-887d-165a64b2f5b5-var-log-ovn\") pod \"ovn-controller-hw5x4-config-4n9q6\" (UID: \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\") " pod="openstack/ovn-controller-hw5x4-config-4n9q6" Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.328465 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bvls\" (UniqueName: \"kubernetes.io/projected/9ac1caba-9d98-47fb-887d-165a64b2f5b5-kube-api-access-4bvls\") pod \"ovn-controller-hw5x4-config-4n9q6\" (UID: \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\") " pod="openstack/ovn-controller-hw5x4-config-4n9q6" Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.328507 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9ac1caba-9d98-47fb-887d-165a64b2f5b5-additional-scripts\") pod \"ovn-controller-hw5x4-config-4n9q6\" (UID: \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\") " pod="openstack/ovn-controller-hw5x4-config-4n9q6" Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.329310 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9ac1caba-9d98-47fb-887d-165a64b2f5b5-additional-scripts\") pod \"ovn-controller-hw5x4-config-4n9q6\" (UID: \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\") " pod="openstack/ovn-controller-hw5x4-config-4n9q6" Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.329639 4632 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9ac1caba-9d98-47fb-887d-165a64b2f5b5-var-run-ovn\") pod \"ovn-controller-hw5x4-config-4n9q6\" (UID: \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\") " pod="openstack/ovn-controller-hw5x4-config-4n9q6" Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.329720 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9ac1caba-9d98-47fb-887d-165a64b2f5b5-var-run\") pod \"ovn-controller-hw5x4-config-4n9q6\" (UID: \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\") " pod="openstack/ovn-controller-hw5x4-config-4n9q6" Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.329765 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9ac1caba-9d98-47fb-887d-165a64b2f5b5-var-log-ovn\") pod \"ovn-controller-hw5x4-config-4n9q6\" (UID: \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\") " pod="openstack/ovn-controller-hw5x4-config-4n9q6" Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.330945 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9ac1caba-9d98-47fb-887d-165a64b2f5b5-scripts\") pod \"ovn-controller-hw5x4-config-4n9q6\" (UID: \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\") " pod="openstack/ovn-controller-hw5x4-config-4n9q6" Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.354214 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bvls\" (UniqueName: \"kubernetes.io/projected/9ac1caba-9d98-47fb-887d-165a64b2f5b5-kube-api-access-4bvls\") pod \"ovn-controller-hw5x4-config-4n9q6\" (UID: \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\") " pod="openstack/ovn-controller-hw5x4-config-4n9q6" Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.492057 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-hw5x4-config-4n9q6" Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.760395 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e791f91-5c8a-49b4-99a7-fb89280a6f26" path="/var/lib/kubelet/pods/6e791f91-5c8a-49b4-99a7-fb89280a6f26/volumes" Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.761230 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rf8lj" event={"ID":"fd8203a0-6352-4188-9f57-1cfaacddd93a","Type":"ContainerStarted","Data":"089fe112ddad57134db1f397c88f2b6672821a85476bb459581a4e6073b7db97"} Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.761693 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b136b809-94ef-4a5e-86b6-d3652e7ce987","Type":"ContainerStarted","Data":"ef69e668545452524b8f62a0ea0401167375c7b4d54beff23c1eed4fb5e9493f"} Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.761737 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b136b809-94ef-4a5e-86b6-d3652e7ce987","Type":"ContainerStarted","Data":"37f1cc110c29a4938bcc66b4898aa80368f28ec31cb75c339dabe3444a8fe557"} Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.771820 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rf8lj" podStartSLOduration=7.198499526 podStartE2EDuration="9.771807411s" podCreationTimestamp="2025-12-01 06:57:09 +0000 UTC" firstStartedPulling="2025-12-01 06:57:15.7159599 +0000 UTC m=+845.280972873" lastFinishedPulling="2025-12-01 06:57:18.289267785 +0000 UTC m=+847.854280758" observedRunningTime="2025-12-01 06:57:18.769701919 +0000 UTC m=+848.334714892" watchObservedRunningTime="2025-12-01 06:57:18.771807411 +0000 UTC m=+848.336820384" Dec 01 06:57:18 crc kubenswrapper[4632]: I1201 06:57:18.915746 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-hw5x4-config-4n9q6"] Dec 01 06:57:18 crc kubenswrapper[4632]: W1201 06:57:18.933665 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9ac1caba_9d98_47fb_887d_165a64b2f5b5.slice/crio-1d0a68838dd7bd884ebda9b482a4d487b2021c23ef5f38a2bb6e68c5b20737aa WatchSource:0}: Error finding container 1d0a68838dd7bd884ebda9b482a4d487b2021c23ef5f38a2bb6e68c5b20737aa: Status 404 returned error can't find the container with id 1d0a68838dd7bd884ebda9b482a4d487b2021c23ef5f38a2bb6e68c5b20737aa Dec 01 06:57:19 crc kubenswrapper[4632]: I1201 06:57:19.165700 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-hw5x4" Dec 01 06:57:19 crc kubenswrapper[4632]: I1201 06:57:19.372973 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rf8lj" Dec 01 06:57:19 crc kubenswrapper[4632]: I1201 06:57:19.373280 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rf8lj" Dec 01 06:57:19 crc kubenswrapper[4632]: I1201 06:57:19.772993 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b136b809-94ef-4a5e-86b6-d3652e7ce987","Type":"ContainerStarted","Data":"03d0d4725e2ce0b0bb41251b5e50a2122267dc8b049ed9ed8db92040238b4f3b"} Dec 01 06:57:19 crc kubenswrapper[4632]: I1201 06:57:19.773046 4632 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/swift-storage-0" event={"ID":"b136b809-94ef-4a5e-86b6-d3652e7ce987","Type":"ContainerStarted","Data":"2057bb190551b654f359e915ffc85e473d93c60776e2c98f7621bde652a0f576"} Dec 01 06:57:19 crc kubenswrapper[4632]: I1201 06:57:19.773059 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b136b809-94ef-4a5e-86b6-d3652e7ce987","Type":"ContainerStarted","Data":"1e316bb9ae4071842acd880fb7d9b7d8ac8e66494ca3e243b7331e999b856cba"} Dec 01 06:57:19 crc kubenswrapper[4632]: I1201 06:57:19.773068 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b136b809-94ef-4a5e-86b6-d3652e7ce987","Type":"ContainerStarted","Data":"c40604341fda173410a0021ab7579fc67a83332d9f28ed183a28b447fd062ae7"} Dec 01 06:57:19 crc kubenswrapper[4632]: I1201 06:57:19.773076 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"b136b809-94ef-4a5e-86b6-d3652e7ce987","Type":"ContainerStarted","Data":"27f965e59fa10760c8d5cb045c452bb533146a0cf2aa6cc1431b38f8de403ab3"} Dec 01 06:57:19 crc kubenswrapper[4632]: I1201 06:57:19.775834 4632 generic.go:334] "Generic (PLEG): container finished" podID="73583357-a8a2-4812-ba66-553cc7713bd4" containerID="8cf22025785f727164843e0e6a640dfb8cd8e0bc6f92f660d17e053cba35c8fb" exitCode=0 Dec 01 06:57:19 crc kubenswrapper[4632]: I1201 06:57:19.775920 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-984wj" event={"ID":"73583357-a8a2-4812-ba66-553cc7713bd4","Type":"ContainerDied","Data":"8cf22025785f727164843e0e6a640dfb8cd8e0bc6f92f660d17e053cba35c8fb"} Dec 01 06:57:19 crc kubenswrapper[4632]: I1201 06:57:19.777669 4632 generic.go:334] "Generic (PLEG): container finished" podID="9ac1caba-9d98-47fb-887d-165a64b2f5b5" containerID="7187ccc6af03ccdc12b795755e6738169c6b5972ead85afb7a24f5dc83d66285" exitCode=0 Dec 01 06:57:19 crc kubenswrapper[4632]: I1201 06:57:19.777731 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hw5x4-config-4n9q6" event={"ID":"9ac1caba-9d98-47fb-887d-165a64b2f5b5","Type":"ContainerDied","Data":"7187ccc6af03ccdc12b795755e6738169c6b5972ead85afb7a24f5dc83d66285"} Dec 01 06:57:19 crc kubenswrapper[4632]: I1201 06:57:19.777757 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hw5x4-config-4n9q6" event={"ID":"9ac1caba-9d98-47fb-887d-165a64b2f5b5","Type":"ContainerStarted","Data":"1d0a68838dd7bd884ebda9b482a4d487b2021c23ef5f38a2bb6e68c5b20737aa"} Dec 01 06:57:19 crc kubenswrapper[4632]: I1201 06:57:19.800587 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=20.009019171 podStartE2EDuration="30.800574277s" podCreationTimestamp="2025-12-01 06:56:49 +0000 UTC" firstStartedPulling="2025-12-01 06:57:07.461622356 +0000 UTC m=+837.026635329" lastFinishedPulling="2025-12-01 06:57:18.253177462 +0000 UTC m=+847.818190435" observedRunningTime="2025-12-01 06:57:19.796608847 +0000 UTC m=+849.361621820" watchObservedRunningTime="2025-12-01 06:57:19.800574277 +0000 UTC m=+849.365587251" Dec 01 06:57:20 crc kubenswrapper[4632]: I1201 06:57:20.041401 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-854cc8dd8c-b77x8"] Dec 01 06:57:20 crc kubenswrapper[4632]: I1201 06:57:20.043197 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8"
Dec 01 06:57:20 crc kubenswrapper[4632]: I1201 06:57:20.045835 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0"
Dec 01 06:57:20 crc kubenswrapper[4632]: I1201 06:57:20.047254 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-854cc8dd8c-b77x8"]
Dec 01 06:57:20 crc kubenswrapper[4632]: I1201 06:57:20.163176 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-config\") pod \"dnsmasq-dns-854cc8dd8c-b77x8\" (UID: \"0e9014ea-d672-4919-84e6-659f8f030d66\") " pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8"
Dec 01 06:57:20 crc kubenswrapper[4632]: I1201 06:57:20.163296 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-dns-swift-storage-0\") pod \"dnsmasq-dns-854cc8dd8c-b77x8\" (UID: \"0e9014ea-d672-4919-84e6-659f8f030d66\") " pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8"
Dec 01 06:57:20 crc kubenswrapper[4632]: I1201 06:57:20.163331 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-ovsdbserver-nb\") pod \"dnsmasq-dns-854cc8dd8c-b77x8\" (UID: \"0e9014ea-d672-4919-84e6-659f8f030d66\") " pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8"
Dec 01 06:57:20 crc kubenswrapper[4632]: I1201 06:57:20.163441 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjfpf\" (UniqueName: \"kubernetes.io/projected/0e9014ea-d672-4919-84e6-659f8f030d66-kube-api-access-gjfpf\") pod \"dnsmasq-dns-854cc8dd8c-b77x8\" (UID: \"0e9014ea-d672-4919-84e6-659f8f030d66\") " pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8"
Dec 01 06:57:20 crc kubenswrapper[4632]: I1201 06:57:20.163549 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-dns-svc\") pod \"dnsmasq-dns-854cc8dd8c-b77x8\" (UID: \"0e9014ea-d672-4919-84e6-659f8f030d66\") " pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8"
Dec 01 06:57:20 crc kubenswrapper[4632]: I1201 06:57:20.163720 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-ovsdbserver-sb\") pod \"dnsmasq-dns-854cc8dd8c-b77x8\" (UID: \"0e9014ea-d672-4919-84e6-659f8f030d66\") " pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8"
Dec 01 06:57:20 crc kubenswrapper[4632]: I1201 06:57:20.264484 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-config\") pod \"dnsmasq-dns-854cc8dd8c-b77x8\" (UID: \"0e9014ea-d672-4919-84e6-659f8f030d66\") " pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8"
Dec 01 06:57:20 crc kubenswrapper[4632]: I1201 06:57:20.264537 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-dns-swift-storage-0\") pod \"dnsmasq-dns-854cc8dd8c-b77x8\" (UID: \"0e9014ea-d672-4919-84e6-659f8f030d66\") " pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8"
Dec 01 06:57:20 crc kubenswrapper[4632]: I1201 06:57:20.264561 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-ovsdbserver-nb\") pod \"dnsmasq-dns-854cc8dd8c-b77x8\" (UID: \"0e9014ea-d672-4919-84e6-659f8f030d66\") " pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8"
Dec 01 06:57:20 crc kubenswrapper[4632]: I1201 06:57:20.264596 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjfpf\" (UniqueName: \"kubernetes.io/projected/0e9014ea-d672-4919-84e6-659f8f030d66-kube-api-access-gjfpf\") pod \"dnsmasq-dns-854cc8dd8c-b77x8\" (UID: \"0e9014ea-d672-4919-84e6-659f8f030d66\") " pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8"
Dec 01 06:57:20 crc kubenswrapper[4632]: I1201 06:57:20.264629 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-dns-svc\") pod \"dnsmasq-dns-854cc8dd8c-b77x8\" (UID: \"0e9014ea-d672-4919-84e6-659f8f030d66\") " pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8"
Dec 01 06:57:20 crc kubenswrapper[4632]: I1201 06:57:20.264684 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-ovsdbserver-sb\") pod \"dnsmasq-dns-854cc8dd8c-b77x8\" (UID: \"0e9014ea-d672-4919-84e6-659f8f030d66\") " pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8"
Dec 01 06:57:20 crc kubenswrapper[4632]: I1201 06:57:20.265600 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-config\") pod \"dnsmasq-dns-854cc8dd8c-b77x8\" (UID: \"0e9014ea-d672-4919-84e6-659f8f030d66\") " pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8"
Dec 01 06:57:20 crc kubenswrapper[4632]: I1201 06:57:20.265752 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-dns-swift-storage-0\") pod \"dnsmasq-dns-854cc8dd8c-b77x8\" (UID: \"0e9014ea-d672-4919-84e6-659f8f030d66\") " pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8"
Dec 01 06:57:20 crc kubenswrapper[4632]: I1201 06:57:20.265801 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-ovsdbserver-nb\") pod \"dnsmasq-dns-854cc8dd8c-b77x8\" (UID: \"0e9014ea-d672-4919-84e6-659f8f030d66\") " pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8"
Dec 01 06:57:20 crc kubenswrapper[4632]: I1201 06:57:20.265765 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-ovsdbserver-sb\") pod \"dnsmasq-dns-854cc8dd8c-b77x8\" (UID: \"0e9014ea-d672-4919-84e6-659f8f030d66\") " pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8"
Dec 01 06:57:20 crc kubenswrapper[4632]: I1201 06:57:20.266008 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-dns-svc\") pod \"dnsmasq-dns-854cc8dd8c-b77x8\" (UID: \"0e9014ea-d672-4919-84e6-659f8f030d66\") " pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8"
Dec 01 06:57:20 crc kubenswrapper[4632]: I1201 06:57:20.282313 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjfpf\" (UniqueName: \"kubernetes.io/projected/0e9014ea-d672-4919-84e6-659f8f030d66-kube-api-access-gjfpf\") pod \"dnsmasq-dns-854cc8dd8c-b77x8\" (UID: \"0e9014ea-d672-4919-84e6-659f8f030d66\") " pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8"
Dec 01 06:57:20 crc kubenswrapper[4632]: I1201 06:57:20.363708 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8"
Dec 01 06:57:20 crc kubenswrapper[4632]: I1201 06:57:20.410619 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-rf8lj" podUID="fd8203a0-6352-4188-9f57-1cfaacddd93a" containerName="registry-server" probeResult="failure" output=<
Dec 01 06:57:20 crc kubenswrapper[4632]: timeout: failed to connect service ":50051" within 1s
Dec 01 06:57:20 crc kubenswrapper[4632]: >
Dec 01 06:57:20 crc kubenswrapper[4632]: I1201 06:57:20.746595 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-854cc8dd8c-b77x8"]
Dec 01 06:57:20 crc kubenswrapper[4632]: I1201 06:57:20.793738 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8" event={"ID":"0e9014ea-d672-4919-84e6-659f8f030d66","Type":"ContainerStarted","Data":"1c07088166e43e18de1dab207c72c63681ad1e31f5b091449ea0fe01d0ec3b5c"}
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.073030 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-hw5x4-config-4n9q6"
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.098693 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-984wj"
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.180856 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9ac1caba-9d98-47fb-887d-165a64b2f5b5-scripts\") pod \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\" (UID: \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\") "
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.181076 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73583357-a8a2-4812-ba66-553cc7713bd4-combined-ca-bundle\") pod \"73583357-a8a2-4812-ba66-553cc7713bd4\" (UID: \"73583357-a8a2-4812-ba66-553cc7713bd4\") "
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.181138 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4bvls\" (UniqueName: \"kubernetes.io/projected/9ac1caba-9d98-47fb-887d-165a64b2f5b5-kube-api-access-4bvls\") pod \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\" (UID: \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\") "
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.181167 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73583357-a8a2-4812-ba66-553cc7713bd4-config-data\") pod \"73583357-a8a2-4812-ba66-553cc7713bd4\" (UID: \"73583357-a8a2-4812-ba66-553cc7713bd4\") "
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.181301 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmnjj\" (UniqueName: \"kubernetes.io/projected/73583357-a8a2-4812-ba66-553cc7713bd4-kube-api-access-dmnjj\") pod \"73583357-a8a2-4812-ba66-553cc7713bd4\" (UID: \"73583357-a8a2-4812-ba66-553cc7713bd4\") "
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.181396 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9ac1caba-9d98-47fb-887d-165a64b2f5b5-var-log-ovn\") pod \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\" (UID: \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\") "
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.181445 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/73583357-a8a2-4812-ba66-553cc7713bd4-db-sync-config-data\") pod \"73583357-a8a2-4812-ba66-553cc7713bd4\" (UID: \"73583357-a8a2-4812-ba66-553cc7713bd4\") "
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.181485 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9ac1caba-9d98-47fb-887d-165a64b2f5b5-var-run-ovn\") pod \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\" (UID: \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\") "
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.181503 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9ac1caba-9d98-47fb-887d-165a64b2f5b5-var-run\") pod \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\" (UID: \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\") "
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.181640 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9ac1caba-9d98-47fb-887d-165a64b2f5b5-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "9ac1caba-9d98-47fb-887d-165a64b2f5b5" (UID: "9ac1caba-9d98-47fb-887d-165a64b2f5b5"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.181700 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9ac1caba-9d98-47fb-887d-165a64b2f5b5-var-run" (OuterVolumeSpecName: "var-run") pod "9ac1caba-9d98-47fb-887d-165a64b2f5b5" (UID: "9ac1caba-9d98-47fb-887d-165a64b2f5b5"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.181752 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9ac1caba-9d98-47fb-887d-165a64b2f5b5-additional-scripts\") pod \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\" (UID: \"9ac1caba-9d98-47fb-887d-165a64b2f5b5\") "
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.181727 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9ac1caba-9d98-47fb-887d-165a64b2f5b5-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "9ac1caba-9d98-47fb-887d-165a64b2f5b5" (UID: "9ac1caba-9d98-47fb-887d-165a64b2f5b5"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.182458 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ac1caba-9d98-47fb-887d-165a64b2f5b5-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "9ac1caba-9d98-47fb-887d-165a64b2f5b5" (UID: "9ac1caba-9d98-47fb-887d-165a64b2f5b5"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.182662 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ac1caba-9d98-47fb-887d-165a64b2f5b5-scripts" (OuterVolumeSpecName: "scripts") pod "9ac1caba-9d98-47fb-887d-165a64b2f5b5" (UID: "9ac1caba-9d98-47fb-887d-165a64b2f5b5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.183531 4632 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9ac1caba-9d98-47fb-887d-165a64b2f5b5-var-log-ovn\") on node \"crc\" DevicePath \"\""
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.183625 4632 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9ac1caba-9d98-47fb-887d-165a64b2f5b5-var-run-ovn\") on node \"crc\" DevicePath \"\""
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.183685 4632 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9ac1caba-9d98-47fb-887d-165a64b2f5b5-var-run\") on node \"crc\" DevicePath \"\""
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.183742 4632 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/9ac1caba-9d98-47fb-887d-165a64b2f5b5-additional-scripts\") on node \"crc\" DevicePath \"\""
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.183798 4632 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9ac1caba-9d98-47fb-887d-165a64b2f5b5-scripts\") on node \"crc\" DevicePath \"\""
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.187832 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ac1caba-9d98-47fb-887d-165a64b2f5b5-kube-api-access-4bvls" (OuterVolumeSpecName: "kube-api-access-4bvls") pod "9ac1caba-9d98-47fb-887d-165a64b2f5b5" (UID: "9ac1caba-9d98-47fb-887d-165a64b2f5b5"). InnerVolumeSpecName "kube-api-access-4bvls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.188104 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73583357-a8a2-4812-ba66-553cc7713bd4-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "73583357-a8a2-4812-ba66-553cc7713bd4" (UID: "73583357-a8a2-4812-ba66-553cc7713bd4"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.188457 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73583357-a8a2-4812-ba66-553cc7713bd4-kube-api-access-dmnjj" (OuterVolumeSpecName: "kube-api-access-dmnjj") pod "73583357-a8a2-4812-ba66-553cc7713bd4" (UID: "73583357-a8a2-4812-ba66-553cc7713bd4"). InnerVolumeSpecName "kube-api-access-dmnjj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.202905 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73583357-a8a2-4812-ba66-553cc7713bd4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "73583357-a8a2-4812-ba66-553cc7713bd4" (UID: "73583357-a8a2-4812-ba66-553cc7713bd4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.215073 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73583357-a8a2-4812-ba66-553cc7713bd4-config-data" (OuterVolumeSpecName: "config-data") pod "73583357-a8a2-4812-ba66-553cc7713bd4" (UID: "73583357-a8a2-4812-ba66-553cc7713bd4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.285622 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73583357-a8a2-4812-ba66-553cc7713bd4-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.285650 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4bvls\" (UniqueName: \"kubernetes.io/projected/9ac1caba-9d98-47fb-887d-165a64b2f5b5-kube-api-access-4bvls\") on node \"crc\" DevicePath \"\""
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.285662 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73583357-a8a2-4812-ba66-553cc7713bd4-config-data\") on node \"crc\" DevicePath \"\""
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.285671 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmnjj\" (UniqueName: \"kubernetes.io/projected/73583357-a8a2-4812-ba66-553cc7713bd4-kube-api-access-dmnjj\") on node \"crc\" DevicePath \"\""
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.285681 4632 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/73583357-a8a2-4812-ba66-553cc7713bd4-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.802121 4632 generic.go:334] "Generic (PLEG): container finished" podID="0e9014ea-d672-4919-84e6-659f8f030d66" containerID="f5cc4b0e5aab6f7ca11822c5736e25120588a8324096a27ee6b3f74ad6b1c214" exitCode=0
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.802196 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8" event={"ID":"0e9014ea-d672-4919-84e6-659f8f030d66","Type":"ContainerDied","Data":"f5cc4b0e5aab6f7ca11822c5736e25120588a8324096a27ee6b3f74ad6b1c214"}
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.804078 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-984wj" event={"ID":"73583357-a8a2-4812-ba66-553cc7713bd4","Type":"ContainerDied","Data":"250c0d09407196f00d25b1a72014fa5a4fd2b48b357e925cbaa209b3b072fe50"}
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.804102 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="250c0d09407196f00d25b1a72014fa5a4fd2b48b357e925cbaa209b3b072fe50"
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.804116 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-984wj"
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.807180 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-hw5x4-config-4n9q6" event={"ID":"9ac1caba-9d98-47fb-887d-165a64b2f5b5","Type":"ContainerDied","Data":"1d0a68838dd7bd884ebda9b482a4d487b2021c23ef5f38a2bb6e68c5b20737aa"}
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.807216 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1d0a68838dd7bd884ebda9b482a4d487b2021c23ef5f38a2bb6e68c5b20737aa"
Dec 01 06:57:21 crc kubenswrapper[4632]: I1201 06:57:21.807264 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-hw5x4-config-4n9q6"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.144413 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-854cc8dd8c-b77x8"]
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.166879 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-hw5x4-config-4n9q6"]
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.181521 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-hw5x4-config-4n9q6"]
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.186970 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74ccfcf59-pw9tv"]
Dec 01 06:57:22 crc kubenswrapper[4632]: E1201 06:57:22.187424 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73583357-a8a2-4812-ba66-553cc7713bd4" containerName="glance-db-sync"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.187462 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="73583357-a8a2-4812-ba66-553cc7713bd4" containerName="glance-db-sync"
Dec 01 06:57:22 crc kubenswrapper[4632]: E1201 06:57:22.187514 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ac1caba-9d98-47fb-887d-165a64b2f5b5" containerName="ovn-config"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.187521 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ac1caba-9d98-47fb-887d-165a64b2f5b5" containerName="ovn-config"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.187734 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ac1caba-9d98-47fb-887d-165a64b2f5b5" containerName="ovn-config"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.187760 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="73583357-a8a2-4812-ba66-553cc7713bd4" containerName="glance-db-sync"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.188665 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.199012 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74ccfcf59-pw9tv"]
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.307703 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-dns-svc\") pod \"dnsmasq-dns-74ccfcf59-pw9tv\" (UID: \"bd873704-df6d-4418-ba91-4ecf29de059f\") " pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.307784 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-ovsdbserver-nb\") pod \"dnsmasq-dns-74ccfcf59-pw9tv\" (UID: \"bd873704-df6d-4418-ba91-4ecf29de059f\") " pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.307934 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5vm6\" (UniqueName: \"kubernetes.io/projected/bd873704-df6d-4418-ba91-4ecf29de059f-kube-api-access-x5vm6\") pod \"dnsmasq-dns-74ccfcf59-pw9tv\" (UID: \"bd873704-df6d-4418-ba91-4ecf29de059f\") " pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.308050 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-dns-swift-storage-0\") pod \"dnsmasq-dns-74ccfcf59-pw9tv\" (UID: \"bd873704-df6d-4418-ba91-4ecf29de059f\") " pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.308090 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-config\") pod \"dnsmasq-dns-74ccfcf59-pw9tv\" (UID: \"bd873704-df6d-4418-ba91-4ecf29de059f\") " pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.308163 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-ovsdbserver-sb\") pod \"dnsmasq-dns-74ccfcf59-pw9tv\" (UID: \"bd873704-df6d-4418-ba91-4ecf29de059f\") " pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.410322 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-ovsdbserver-sb\") pod \"dnsmasq-dns-74ccfcf59-pw9tv\" (UID: \"bd873704-df6d-4418-ba91-4ecf29de059f\") " pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.410710 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-dns-svc\") pod \"dnsmasq-dns-74ccfcf59-pw9tv\" (UID: \"bd873704-df6d-4418-ba91-4ecf29de059f\") " pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.410763 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-ovsdbserver-nb\") pod \"dnsmasq-dns-74ccfcf59-pw9tv\" (UID: \"bd873704-df6d-4418-ba91-4ecf29de059f\") " pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.410826 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5vm6\" (UniqueName: \"kubernetes.io/projected/bd873704-df6d-4418-ba91-4ecf29de059f-kube-api-access-x5vm6\") pod \"dnsmasq-dns-74ccfcf59-pw9tv\" (UID: \"bd873704-df6d-4418-ba91-4ecf29de059f\") " pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.410928 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-dns-swift-storage-0\") pod \"dnsmasq-dns-74ccfcf59-pw9tv\" (UID: \"bd873704-df6d-4418-ba91-4ecf29de059f\") " pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.410981 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-config\") pod \"dnsmasq-dns-74ccfcf59-pw9tv\" (UID: \"bd873704-df6d-4418-ba91-4ecf29de059f\") " pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.411314 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-ovsdbserver-sb\") pod \"dnsmasq-dns-74ccfcf59-pw9tv\" (UID: \"bd873704-df6d-4418-ba91-4ecf29de059f\") " pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.411605 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-dns-svc\") pod \"dnsmasq-dns-74ccfcf59-pw9tv\" (UID: \"bd873704-df6d-4418-ba91-4ecf29de059f\") " pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.411714 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-ovsdbserver-nb\") pod \"dnsmasq-dns-74ccfcf59-pw9tv\" (UID: \"bd873704-df6d-4418-ba91-4ecf29de059f\") " pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.411835 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-config\") pod \"dnsmasq-dns-74ccfcf59-pw9tv\" (UID: \"bd873704-df6d-4418-ba91-4ecf29de059f\") " pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.411892 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-dns-swift-storage-0\") pod \"dnsmasq-dns-74ccfcf59-pw9tv\" (UID: \"bd873704-df6d-4418-ba91-4ecf29de059f\") " pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.424851 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5vm6\" (UniqueName: \"kubernetes.io/projected/bd873704-df6d-4418-ba91-4ecf29de059f-kube-api-access-x5vm6\") pod \"dnsmasq-dns-74ccfcf59-pw9tv\" (UID: \"bd873704-df6d-4418-ba91-4ecf29de059f\") " pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.511668 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.775903 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ac1caba-9d98-47fb-887d-165a64b2f5b5" path="/var/lib/kubelet/pods/9ac1caba-9d98-47fb-887d-165a64b2f5b5/volumes"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.819850 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8" event={"ID":"0e9014ea-d672-4919-84e6-659f8f030d66","Type":"ContainerStarted","Data":"0322092d1206fbadec21a328d62dd4028dc51800e7c6e973ac9ef3d21c886094"}
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.820937 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.840701 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8" podStartSLOduration=2.840681123 podStartE2EDuration="2.840681123s" podCreationTimestamp="2025-12-01 06:57:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:57:22.836480198 +0000 UTC m=+852.401493181" watchObservedRunningTime="2025-12-01 06:57:22.840681123 +0000 UTC m=+852.405694086"
Dec 01 06:57:22 crc kubenswrapper[4632]: I1201 06:57:22.909031 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74ccfcf59-pw9tv"]
Dec 01 06:57:23 crc kubenswrapper[4632]: I1201 06:57:23.828661 4632 generic.go:334] "Generic (PLEG): container finished" podID="bd873704-df6d-4418-ba91-4ecf29de059f" containerID="2680c8ca0faf875e6b868f3b99892f2fb7b6a3473a633b598d6299435bca6919" exitCode=0
Dec 01 06:57:23 crc kubenswrapper[4632]: I1201 06:57:23.828770 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv" event={"ID":"bd873704-df6d-4418-ba91-4ecf29de059f","Type":"ContainerDied","Data":"2680c8ca0faf875e6b868f3b99892f2fb7b6a3473a633b598d6299435bca6919"}
Dec 01 06:57:23 crc kubenswrapper[4632]: I1201 06:57:23.829094 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv" event={"ID":"bd873704-df6d-4418-ba91-4ecf29de059f","Type":"ContainerStarted","Data":"cbd3a8a4998c297d45b14ec49e88ab4352af4a566838907ede1e41ddfd06ce39"}
Dec 01 06:57:23 crc kubenswrapper[4632]: I1201 06:57:23.829271 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8" podUID="0e9014ea-d672-4919-84e6-659f8f030d66" containerName="dnsmasq-dns" containerID="cri-o://0322092d1206fbadec21a328d62dd4028dc51800e7c6e973ac9ef3d21c886094" gracePeriod=10
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.172845 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8"
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.244483 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-ovsdbserver-sb\") pod \"0e9014ea-d672-4919-84e6-659f8f030d66\" (UID: \"0e9014ea-d672-4919-84e6-659f8f030d66\") "
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.244540 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-config\") pod \"0e9014ea-d672-4919-84e6-659f8f030d66\" (UID: \"0e9014ea-d672-4919-84e6-659f8f030d66\") "
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.244703 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-ovsdbserver-nb\") pod \"0e9014ea-d672-4919-84e6-659f8f030d66\" (UID: \"0e9014ea-d672-4919-84e6-659f8f030d66\") "
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.244747 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-dns-svc\") pod \"0e9014ea-d672-4919-84e6-659f8f030d66\" (UID: \"0e9014ea-d672-4919-84e6-659f8f030d66\") "
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.244792 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjfpf\" (UniqueName: \"kubernetes.io/projected/0e9014ea-d672-4919-84e6-659f8f030d66-kube-api-access-gjfpf\") pod \"0e9014ea-d672-4919-84e6-659f8f030d66\" (UID: \"0e9014ea-d672-4919-84e6-659f8f030d66\") "
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.244866 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-dns-swift-storage-0\") pod \"0e9014ea-d672-4919-84e6-659f8f030d66\" (UID: \"0e9014ea-d672-4919-84e6-659f8f030d66\") "
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.250173 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e9014ea-d672-4919-84e6-659f8f030d66-kube-api-access-gjfpf" (OuterVolumeSpecName: "kube-api-access-gjfpf") pod "0e9014ea-d672-4919-84e6-659f8f030d66" (UID: "0e9014ea-d672-4919-84e6-659f8f030d66"). InnerVolumeSpecName "kube-api-access-gjfpf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.279935 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "0e9014ea-d672-4919-84e6-659f8f030d66" (UID: "0e9014ea-d672-4919-84e6-659f8f030d66"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.279995 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0e9014ea-d672-4919-84e6-659f8f030d66" (UID: "0e9014ea-d672-4919-84e6-659f8f030d66"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.280020 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0e9014ea-d672-4919-84e6-659f8f030d66" (UID: "0e9014ea-d672-4919-84e6-659f8f030d66"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.280860 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-config" (OuterVolumeSpecName: "config") pod "0e9014ea-d672-4919-84e6-659f8f030d66" (UID: "0e9014ea-d672-4919-84e6-659f8f030d66"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.282242 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0e9014ea-d672-4919-84e6-659f8f030d66" (UID: "0e9014ea-d672-4919-84e6-659f8f030d66"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.349632 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjfpf\" (UniqueName: \"kubernetes.io/projected/0e9014ea-d672-4919-84e6-659f8f030d66-kube-api-access-gjfpf\") on node \"crc\" DevicePath \"\""
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.349674 4632 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.349685 4632 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.349697 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-config\") on node \"crc\" DevicePath \"\""
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.349707 4632 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.349735 4632 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0e9014ea-d672-4919-84e6-659f8f030d66-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.425581 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0"
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.462493 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qpnjp"]
Dec 01 06:57:24 crc kubenswrapper[4632]: E1201 06:57:24.462871 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e9014ea-d672-4919-84e6-659f8f030d66" containerName="init"
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.462890 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e9014ea-d672-4919-84e6-659f8f030d66" containerName="init"
Dec 01 06:57:24 crc kubenswrapper[4632]: E1201 06:57:24.462901 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e9014ea-d672-4919-84e6-659f8f030d66" containerName="dnsmasq-dns"
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.462906 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e9014ea-d672-4919-84e6-659f8f030d66" containerName="dnsmasq-dns"
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.463100 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e9014ea-d672-4919-84e6-659f8f030d66" containerName="dnsmasq-dns"
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.464229 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qpnjp"
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.482824 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qpnjp"]
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.553459 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd88df45-3cae-489b-8811-23ca534c8b04-utilities\") pod \"certified-operators-qpnjp\" (UID: \"cd88df45-3cae-489b-8811-23ca534c8b04\") " pod="openshift-marketplace/certified-operators-qpnjp"
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.553678 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cl6s7\" (UniqueName: \"kubernetes.io/projected/cd88df45-3cae-489b-8811-23ca534c8b04-kube-api-access-cl6s7\") pod \"certified-operators-qpnjp\" (UID: \"cd88df45-3cae-489b-8811-23ca534c8b04\") " pod="openshift-marketplace/certified-operators-qpnjp"
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.553738 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd88df45-3cae-489b-8811-23ca534c8b04-catalog-content\") pod \"certified-operators-qpnjp\" (UID: \"cd88df45-3cae-489b-8811-23ca534c8b04\") " pod="openshift-marketplace/certified-operators-qpnjp"
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.655391 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd88df45-3cae-489b-8811-23ca534c8b04-utilities\") pod \"certified-operators-qpnjp\" (UID: \"cd88df45-3cae-489b-8811-23ca534c8b04\") " pod="openshift-marketplace/certified-operators-qpnjp"
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.655557 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cl6s7\" (UniqueName: \"kubernetes.io/projected/cd88df45-3cae-489b-8811-23ca534c8b04-kube-api-access-cl6s7\") pod \"certified-operators-qpnjp\" (UID: \"cd88df45-3cae-489b-8811-23ca534c8b04\") " pod="openshift-marketplace/certified-operators-qpnjp"
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.655598 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd88df45-3cae-489b-8811-23ca534c8b04-catalog-content\") pod \"certified-operators-qpnjp\" (UID: \"cd88df45-3cae-489b-8811-23ca534c8b04\") " pod="openshift-marketplace/certified-operators-qpnjp"
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.655872 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd88df45-3cae-489b-8811-23ca534c8b04-utilities\") pod \"certified-operators-qpnjp\" (UID: \"cd88df45-3cae-489b-8811-23ca534c8b04\") " pod="openshift-marketplace/certified-operators-qpnjp"
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.656000 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd88df45-3cae-489b-8811-23ca534c8b04-catalog-content\") pod \"certified-operators-qpnjp\" (UID: \"cd88df45-3cae-489b-8811-23ca534c8b04\") " pod="openshift-marketplace/certified-operators-qpnjp"
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.669800 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cl6s7\" (UniqueName: \"kubernetes.io/projected/cd88df45-3cae-489b-8811-23ca534c8b04-kube-api-access-cl6s7\") pod \"certified-operators-qpnjp\" (UID: \"cd88df45-3cae-489b-8811-23ca534c8b04\") " pod="openshift-marketplace/certified-operators-qpnjp"
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.713588 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0"
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.790642 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qpnjp"
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.847899 4632 generic.go:334] "Generic (PLEG): container finished" podID="0e9014ea-d672-4919-84e6-659f8f030d66" containerID="0322092d1206fbadec21a328d62dd4028dc51800e7c6e973ac9ef3d21c886094" exitCode=0
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.848050 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8"
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.848567 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8" event={"ID":"0e9014ea-d672-4919-84e6-659f8f030d66","Type":"ContainerDied","Data":"0322092d1206fbadec21a328d62dd4028dc51800e7c6e973ac9ef3d21c886094"}
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.848595 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-854cc8dd8c-b77x8" event={"ID":"0e9014ea-d672-4919-84e6-659f8f030d66","Type":"ContainerDied","Data":"1c07088166e43e18de1dab207c72c63681ad1e31f5b091449ea0fe01d0ec3b5c"}
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.848613 4632 scope.go:117] "RemoveContainer" containerID="0322092d1206fbadec21a328d62dd4028dc51800e7c6e973ac9ef3d21c886094"
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.855689 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv" event={"ID":"bd873704-df6d-4418-ba91-4ecf29de059f","Type":"ContainerStarted","Data":"16e4cb4e10f1dcd6f09481ea16c2622eeb770565ca91a8bf7a1fcc13dce3d022"}
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.857069 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv"
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.892414 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv" podStartSLOduration=2.892249192 podStartE2EDuration="2.892249192s" podCreationTimestamp="2025-12-01 06:57:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:57:24.882153527 +0000 UTC m=+854.447166500" watchObservedRunningTime="2025-12-01 06:57:24.892249192 +0000 UTC m=+854.457262166"
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.899529 4632 scope.go:117] "RemoveContainer" containerID="f5cc4b0e5aab6f7ca11822c5736e25120588a8324096a27ee6b3f74ad6b1c214"
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.917902 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-854cc8dd8c-b77x8"]
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.927004 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-854cc8dd8c-b77x8"]
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.943822 4632 scope.go:117] "RemoveContainer" containerID="0322092d1206fbadec21a328d62dd4028dc51800e7c6e973ac9ef3d21c886094"
Dec 01 06:57:24 crc kubenswrapper[4632]: E1201 06:57:24.945085 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0322092d1206fbadec21a328d62dd4028dc51800e7c6e973ac9ef3d21c886094\": container with ID starting with 0322092d1206fbadec21a328d62dd4028dc51800e7c6e973ac9ef3d21c886094 not found: ID does not exist" containerID="0322092d1206fbadec21a328d62dd4028dc51800e7c6e973ac9ef3d21c886094"
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.945132 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0322092d1206fbadec21a328d62dd4028dc51800e7c6e973ac9ef3d21c886094"} err="failed to get container status \"0322092d1206fbadec21a328d62dd4028dc51800e7c6e973ac9ef3d21c886094\": rpc error: code = NotFound desc = could not find container \"0322092d1206fbadec21a328d62dd4028dc51800e7c6e973ac9ef3d21c886094\": container with ID starting with 0322092d1206fbadec21a328d62dd4028dc51800e7c6e973ac9ef3d21c886094 not found: ID does not exist"
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.945154 4632 scope.go:117] "RemoveContainer" containerID="f5cc4b0e5aab6f7ca11822c5736e25120588a8324096a27ee6b3f74ad6b1c214"
Dec 01 06:57:24 crc kubenswrapper[4632]: E1201 06:57:24.946150 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5cc4b0e5aab6f7ca11822c5736e25120588a8324096a27ee6b3f74ad6b1c214\": container with ID starting with f5cc4b0e5aab6f7ca11822c5736e25120588a8324096a27ee6b3f74ad6b1c214 not found: ID does not exist" containerID="f5cc4b0e5aab6f7ca11822c5736e25120588a8324096a27ee6b3f74ad6b1c214"
Dec 01 06:57:24 crc kubenswrapper[4632]: I1201 06:57:24.946186 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5cc4b0e5aab6f7ca11822c5736e25120588a8324096a27ee6b3f74ad6b1c214"} err="failed to get container status \"f5cc4b0e5aab6f7ca11822c5736e25120588a8324096a27ee6b3f74ad6b1c214\": rpc error: code = NotFound desc = could not find container \"f5cc4b0e5aab6f7ca11822c5736e25120588a8324096a27ee6b3f74ad6b1c214\": container with ID starting with f5cc4b0e5aab6f7ca11822c5736e25120588a8324096a27ee6b3f74ad6b1c214 not found: ID does not exist"
Dec 01 06:57:25 crc kubenswrapper[4632]: I1201 06:57:25.064173 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qpnjp"]
Dec 01 06:57:25 crc kubenswrapper[4632]: I1201 06:57:25.863159 4632 generic.go:334] "Generic (PLEG): container finished" podID="cd88df45-3cae-489b-8811-23ca534c8b04" containerID="f1ec582db171720c35aa92edf7e83e01f5cde4f3d3262f8f86f41bda1289a94f" exitCode=0
Dec 01 06:57:25 crc kubenswrapper[4632]: I1201 06:57:25.863211 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qpnjp" event={"ID":"cd88df45-3cae-489b-8811-23ca534c8b04","Type":"ContainerDied","Data":"f1ec582db171720c35aa92edf7e83e01f5cde4f3d3262f8f86f41bda1289a94f"}
Dec 01 06:57:25 crc kubenswrapper[4632]: I1201 06:57:25.863536 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qpnjp" event={"ID":"cd88df45-3cae-489b-8811-23ca534c8b04","Type":"ContainerStarted","Data":"aac6b1667239f9bb1c26ef2c2ea182771c358e3c9d9b83b54413a3a8d165cce5"}
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.138484 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-b9m9r"]
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.139471 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-b9m9r"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.163415 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-a828-account-create-update-wt9ph"]
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.164700 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-a828-account-create-update-wt9ph"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.169878 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.171025 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-b9m9r"]
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.180788 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmv4k\" (UniqueName: \"kubernetes.io/projected/15e859ff-6075-4487-a933-1e037cfa00d0-kube-api-access-pmv4k\") pod \"cinder-db-create-b9m9r\" (UID: \"15e859ff-6075-4487-a933-1e037cfa00d0\") " pod="openstack/cinder-db-create-b9m9r"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.181025 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/15e859ff-6075-4487-a933-1e037cfa00d0-operator-scripts\") pod \"cinder-db-create-b9m9r\" (UID: \"15e859ff-6075-4487-a933-1e037cfa00d0\") " pod="openstack/cinder-db-create-b9m9r"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.194283 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-a828-account-create-update-wt9ph"]
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.243060 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-dr5wg"]
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.244455 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-dr5wg"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.254790 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-dr5wg"]
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.264315 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-93e6-account-create-update-k2rwx"]
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.265562 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-93e6-account-create-update-k2rwx"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.270851 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.282020 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-93e6-account-create-update-k2rwx"]
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.282154 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/faaa590b-70fd-446c-a484-e932b8d2549a-operator-scripts\") pod \"cinder-a828-account-create-update-wt9ph\" (UID: \"faaa590b-70fd-446c-a484-e932b8d2549a\") " pod="openstack/cinder-a828-account-create-update-wt9ph"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.282237 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/826e84c3-9339-4e50-9845-ca3dd1e9fc67-operator-scripts\") pod \"barbican-db-create-dr5wg\" (UID: \"826e84c3-9339-4e50-9845-ca3dd1e9fc67\") " pod="openstack/barbican-db-create-dr5wg"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.282292 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/15e859ff-6075-4487-a933-1e037cfa00d0-operator-scripts\") pod \"cinder-db-create-b9m9r\" (UID: \"15e859ff-6075-4487-a933-1e037cfa00d0\") " pod="openstack/cinder-db-create-b9m9r"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.282333 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wf9cl\" (UniqueName: \"kubernetes.io/projected/826e84c3-9339-4e50-9845-ca3dd1e9fc67-kube-api-access-wf9cl\") pod \"barbican-db-create-dr5wg\" (UID: \"826e84c3-9339-4e50-9845-ca3dd1e9fc67\") " pod="openstack/barbican-db-create-dr5wg"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.282412 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94gpp\" (UniqueName: \"kubernetes.io/projected/faaa590b-70fd-446c-a484-e932b8d2549a-kube-api-access-94gpp\") pod \"cinder-a828-account-create-update-wt9ph\" (UID: \"faaa590b-70fd-446c-a484-e932b8d2549a\") " pod="openstack/cinder-a828-account-create-update-wt9ph"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.282451 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmv4k\" (UniqueName: \"kubernetes.io/projected/15e859ff-6075-4487-a933-1e037cfa00d0-kube-api-access-pmv4k\") pod \"cinder-db-create-b9m9r\" (UID: \"15e859ff-6075-4487-a933-1e037cfa00d0\") " pod="openstack/cinder-db-create-b9m9r"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.282933 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/15e859ff-6075-4487-a933-1e037cfa00d0-operator-scripts\") pod \"cinder-db-create-b9m9r\" (UID: \"15e859ff-6075-4487-a933-1e037cfa00d0\") " pod="openstack/cinder-db-create-b9m9r"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.301318 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmv4k\" (UniqueName: \"kubernetes.io/projected/15e859ff-6075-4487-a933-1e037cfa00d0-kube-api-access-pmv4k\") pod \"cinder-db-create-b9m9r\" (UID: \"15e859ff-6075-4487-a933-1e037cfa00d0\") " pod="openstack/cinder-db-create-b9m9r"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.384198 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2kd5\" (UniqueName: \"kubernetes.io/projected/3c0bc241-1784-4738-9eb9-cba060f1d9d8-kube-api-access-h2kd5\") pod \"barbican-93e6-account-create-update-k2rwx\" (UID: \"3c0bc241-1784-4738-9eb9-cba060f1d9d8\") " pod="openstack/barbican-93e6-account-create-update-k2rwx"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.384263 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/826e84c3-9339-4e50-9845-ca3dd1e9fc67-operator-scripts\") pod \"barbican-db-create-dr5wg\" (UID: \"826e84c3-9339-4e50-9845-ca3dd1e9fc67\") " pod="openstack/barbican-db-create-dr5wg"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.384451 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wf9cl\" (UniqueName: \"kubernetes.io/projected/826e84c3-9339-4e50-9845-ca3dd1e9fc67-kube-api-access-wf9cl\") pod \"barbican-db-create-dr5wg\" (UID: \"826e84c3-9339-4e50-9845-ca3dd1e9fc67\") " pod="openstack/barbican-db-create-dr5wg"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.384618 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94gpp\" (UniqueName: \"kubernetes.io/projected/faaa590b-70fd-446c-a484-e932b8d2549a-kube-api-access-94gpp\") pod \"cinder-a828-account-create-update-wt9ph\" (UID: \"faaa590b-70fd-446c-a484-e932b8d2549a\") " pod="openstack/cinder-a828-account-create-update-wt9ph"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.384657 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c0bc241-1784-4738-9eb9-cba060f1d9d8-operator-scripts\") pod \"barbican-93e6-account-create-update-k2rwx\" (UID: \"3c0bc241-1784-4738-9eb9-cba060f1d9d8\") " pod="openstack/barbican-93e6-account-create-update-k2rwx"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.384800 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/faaa590b-70fd-446c-a484-e932b8d2549a-operator-scripts\") pod \"cinder-a828-account-create-update-wt9ph\" (UID: \"faaa590b-70fd-446c-a484-e932b8d2549a\") " pod="openstack/cinder-a828-account-create-update-wt9ph"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.384876 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/826e84c3-9339-4e50-9845-ca3dd1e9fc67-operator-scripts\") pod \"barbican-db-create-dr5wg\" (UID: \"826e84c3-9339-4e50-9845-ca3dd1e9fc67\") " pod="openstack/barbican-db-create-dr5wg"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.385602 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/faaa590b-70fd-446c-a484-e932b8d2549a-operator-scripts\") pod \"cinder-a828-account-create-update-wt9ph\" (UID: \"faaa590b-70fd-446c-a484-e932b8d2549a\") " pod="openstack/cinder-a828-account-create-update-wt9ph"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.399509 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-pgkg7"]
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.400604 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-pgkg7"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.403806 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-ll652"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.403853 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wf9cl\" (UniqueName: \"kubernetes.io/projected/826e84c3-9339-4e50-9845-ca3dd1e9fc67-kube-api-access-wf9cl\") pod \"barbican-db-create-dr5wg\" (UID: \"826e84c3-9339-4e50-9845-ca3dd1e9fc67\") " pod="openstack/barbican-db-create-dr5wg"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.404012 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.404134 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.405692 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.419798 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94gpp\" (UniqueName: \"kubernetes.io/projected/faaa590b-70fd-446c-a484-e932b8d2549a-kube-api-access-94gpp\") pod \"cinder-a828-account-create-update-wt9ph\" (UID: \"faaa590b-70fd-446c-a484-e932b8d2549a\") " pod="openstack/cinder-a828-account-create-update-wt9ph"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.427642 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-pgkg7"]
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.453154 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-b9m9r"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.471923 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-a527-account-create-update-26dwb"]
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.480054 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-a527-account-create-update-26dwb"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.481230 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-a828-account-create-update-wt9ph"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.483999 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-a527-account-create-update-26dwb"]
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.486888 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.486943 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2kd5\" (UniqueName: \"kubernetes.io/projected/3c0bc241-1784-4738-9eb9-cba060f1d9d8-kube-api-access-h2kd5\") pod \"barbican-93e6-account-create-update-k2rwx\" (UID: \"3c0bc241-1784-4738-9eb9-cba060f1d9d8\") " pod="openstack/barbican-93e6-account-create-update-k2rwx"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.487064 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57240989-7936-48f2-9686-ef72e5dfa1a5-combined-ca-bundle\") pod \"keystone-db-sync-pgkg7\" (UID: \"57240989-7936-48f2-9686-ef72e5dfa1a5\") " pod="openstack/keystone-db-sync-pgkg7"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.487127 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wckzj\" (UniqueName: \"kubernetes.io/projected/57240989-7936-48f2-9686-ef72e5dfa1a5-kube-api-access-wckzj\") pod \"keystone-db-sync-pgkg7\" (UID: \"57240989-7936-48f2-9686-ef72e5dfa1a5\") " pod="openstack/keystone-db-sync-pgkg7"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.487185 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c0bc241-1784-4738-9eb9-cba060f1d9d8-operator-scripts\") pod \"barbican-93e6-account-create-update-k2rwx\" (UID: \"3c0bc241-1784-4738-9eb9-cba060f1d9d8\") " pod="openstack/barbican-93e6-account-create-update-k2rwx"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.487246 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57240989-7936-48f2-9686-ef72e5dfa1a5-config-data\") pod \"keystone-db-sync-pgkg7\" (UID: \"57240989-7936-48f2-9686-ef72e5dfa1a5\") " pod="openstack/keystone-db-sync-pgkg7"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.487876 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c0bc241-1784-4738-9eb9-cba060f1d9d8-operator-scripts\") pod \"barbican-93e6-account-create-update-k2rwx\" (UID: \"3c0bc241-1784-4738-9eb9-cba060f1d9d8\") " pod="openstack/barbican-93e6-account-create-update-k2rwx"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.507729 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2kd5\" (UniqueName: \"kubernetes.io/projected/3c0bc241-1784-4738-9eb9-cba060f1d9d8-kube-api-access-h2kd5\") pod \"barbican-93e6-account-create-update-k2rwx\" (UID: \"3c0bc241-1784-4738-9eb9-cba060f1d9d8\") " pod="openstack/barbican-93e6-account-create-update-k2rwx"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.557084 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-nsn2w"]
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.559346 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-nsn2w"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.561001 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-dr5wg"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.578091 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-93e6-account-create-update-k2rwx"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.580929 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-nsn2w"]
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.589554 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57240989-7936-48f2-9686-ef72e5dfa1a5-config-data\") pod \"keystone-db-sync-pgkg7\" (UID: \"57240989-7936-48f2-9686-ef72e5dfa1a5\") " pod="openstack/keystone-db-sync-pgkg7"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.589676 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6gsj\" (UniqueName: \"kubernetes.io/projected/2d4743b6-ab1b-4792-a781-4849f8b13e94-kube-api-access-s6gsj\") pod \"neutron-a527-account-create-update-26dwb\" (UID: \"2d4743b6-ab1b-4792-a781-4849f8b13e94\") " pod="openstack/neutron-a527-account-create-update-26dwb"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.589760 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43c55cae-a062-4bf2-8649-fee844127588-operator-scripts\") pod \"neutron-db-create-nsn2w\" (UID: \"43c55cae-a062-4bf2-8649-fee844127588\") " pod="openstack/neutron-db-create-nsn2w"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.589796 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d4743b6-ab1b-4792-a781-4849f8b13e94-operator-scripts\") pod \"neutron-a527-account-create-update-26dwb\" (UID: \"2d4743b6-ab1b-4792-a781-4849f8b13e94\") " pod="openstack/neutron-a527-account-create-update-26dwb"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.589817 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57240989-7936-48f2-9686-ef72e5dfa1a5-combined-ca-bundle\") pod \"keystone-db-sync-pgkg7\" (UID: \"57240989-7936-48f2-9686-ef72e5dfa1a5\") " pod="openstack/keystone-db-sync-pgkg7"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.589849 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4h9l\" (UniqueName: \"kubernetes.io/projected/43c55cae-a062-4bf2-8649-fee844127588-kube-api-access-d4h9l\") pod \"neutron-db-create-nsn2w\" (UID: \"43c55cae-a062-4bf2-8649-fee844127588\") " pod="openstack/neutron-db-create-nsn2w"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.589886 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wckzj\" (UniqueName: \"kubernetes.io/projected/57240989-7936-48f2-9686-ef72e5dfa1a5-kube-api-access-wckzj\") pod \"keystone-db-sync-pgkg7\" (UID: \"57240989-7936-48f2-9686-ef72e5dfa1a5\") " pod="openstack/keystone-db-sync-pgkg7"
Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.597156 4632 operation_generator.go:637]
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57240989-7936-48f2-9686-ef72e5dfa1a5-config-data\") pod \"keystone-db-sync-pgkg7\" (UID: \"57240989-7936-48f2-9686-ef72e5dfa1a5\") " pod="openstack/keystone-db-sync-pgkg7" Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.602082 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57240989-7936-48f2-9686-ef72e5dfa1a5-combined-ca-bundle\") pod \"keystone-db-sync-pgkg7\" (UID: \"57240989-7936-48f2-9686-ef72e5dfa1a5\") " pod="openstack/keystone-db-sync-pgkg7" Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.613374 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wckzj\" (UniqueName: \"kubernetes.io/projected/57240989-7936-48f2-9686-ef72e5dfa1a5-kube-api-access-wckzj\") pod \"keystone-db-sync-pgkg7\" (UID: \"57240989-7936-48f2-9686-ef72e5dfa1a5\") " pod="openstack/keystone-db-sync-pgkg7" Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.692191 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4h9l\" (UniqueName: \"kubernetes.io/projected/43c55cae-a062-4bf2-8649-fee844127588-kube-api-access-d4h9l\") pod \"neutron-db-create-nsn2w\" (UID: \"43c55cae-a062-4bf2-8649-fee844127588\") " pod="openstack/neutron-db-create-nsn2w" Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.692629 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6gsj\" (UniqueName: \"kubernetes.io/projected/2d4743b6-ab1b-4792-a781-4849f8b13e94-kube-api-access-s6gsj\") pod \"neutron-a527-account-create-update-26dwb\" (UID: \"2d4743b6-ab1b-4792-a781-4849f8b13e94\") " pod="openstack/neutron-a527-account-create-update-26dwb" Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.692680 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43c55cae-a062-4bf2-8649-fee844127588-operator-scripts\") pod \"neutron-db-create-nsn2w\" (UID: \"43c55cae-a062-4bf2-8649-fee844127588\") " pod="openstack/neutron-db-create-nsn2w" Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.692704 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d4743b6-ab1b-4792-a781-4849f8b13e94-operator-scripts\") pod \"neutron-a527-account-create-update-26dwb\" (UID: \"2d4743b6-ab1b-4792-a781-4849f8b13e94\") " pod="openstack/neutron-a527-account-create-update-26dwb" Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.693597 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43c55cae-a062-4bf2-8649-fee844127588-operator-scripts\") pod \"neutron-db-create-nsn2w\" (UID: \"43c55cae-a062-4bf2-8649-fee844127588\") " pod="openstack/neutron-db-create-nsn2w" Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.693620 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d4743b6-ab1b-4792-a781-4849f8b13e94-operator-scripts\") pod \"neutron-a527-account-create-update-26dwb\" (UID: \"2d4743b6-ab1b-4792-a781-4849f8b13e94\") " pod="openstack/neutron-a527-account-create-update-26dwb" Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.714760 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-d4h9l\" (UniqueName: \"kubernetes.io/projected/43c55cae-a062-4bf2-8649-fee844127588-kube-api-access-d4h9l\") pod \"neutron-db-create-nsn2w\" (UID: \"43c55cae-a062-4bf2-8649-fee844127588\") " pod="openstack/neutron-db-create-nsn2w" Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.723552 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6gsj\" (UniqueName: \"kubernetes.io/projected/2d4743b6-ab1b-4792-a781-4849f8b13e94-kube-api-access-s6gsj\") pod \"neutron-a527-account-create-update-26dwb\" (UID: \"2d4743b6-ab1b-4792-a781-4849f8b13e94\") " pod="openstack/neutron-a527-account-create-update-26dwb" Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.760376 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e9014ea-d672-4919-84e6-659f8f030d66" path="/var/lib/kubelet/pods/0e9014ea-d672-4919-84e6-659f8f030d66/volumes" Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.825862 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-pgkg7" Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.837397 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-a527-account-create-update-26dwb" Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.879338 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-nsn2w" Dec 01 06:57:26 crc kubenswrapper[4632]: I1201 06:57:26.927947 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-b9m9r"] Dec 01 06:57:27 crc kubenswrapper[4632]: I1201 06:57:27.029018 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-a828-account-create-update-wt9ph"] Dec 01 06:57:27 crc kubenswrapper[4632]: I1201 06:57:27.090411 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-dr5wg"] Dec 01 06:57:29 crc kubenswrapper[4632]: I1201 06:57:29.409737 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rf8lj" Dec 01 06:57:29 crc kubenswrapper[4632]: I1201 06:57:29.448472 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rf8lj" Dec 01 06:57:29 crc kubenswrapper[4632]: I1201 06:57:29.839078 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rf8lj"] Dec 01 06:57:30 crc kubenswrapper[4632]: W1201 06:57:30.101539 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod15e859ff_6075_4487_a933_1e037cfa00d0.slice/crio-cc8aa647820357a89e9837958c608a4b204ba435373aaaf56089a5e2a1cc3300 WatchSource:0}: Error finding container cc8aa647820357a89e9837958c608a4b204ba435373aaaf56089a5e2a1cc3300: Status 404 returned error can't find the container with id cc8aa647820357a89e9837958c608a4b204ba435373aaaf56089a5e2a1cc3300 Dec 01 06:57:30 crc kubenswrapper[4632]: W1201 06:57:30.104778 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfaaa590b_70fd_446c_a484_e932b8d2549a.slice/crio-cf681c9e5f7176fa1a26669bb33e2122d1eb083fd30faea520054e80ccc8c0ef WatchSource:0}: Error finding container cf681c9e5f7176fa1a26669bb33e2122d1eb083fd30faea520054e80ccc8c0ef: Status 404 returned error can't find the container with id 
cf681c9e5f7176fa1a26669bb33e2122d1eb083fd30faea520054e80ccc8c0ef Dec 01 06:57:30 crc kubenswrapper[4632]: W1201 06:57:30.108192 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod826e84c3_9339_4e50_9845_ca3dd1e9fc67.slice/crio-f6e69ef6b24c5479be7fa0e9c0d5ac3578ed92164a39944229593dbb396be446 WatchSource:0}: Error finding container f6e69ef6b24c5479be7fa0e9c0d5ac3578ed92164a39944229593dbb396be446: Status 404 returned error can't find the container with id f6e69ef6b24c5479be7fa0e9c0d5ac3578ed92164a39944229593dbb396be446 Dec 01 06:57:30 crc kubenswrapper[4632]: I1201 06:57:30.535539 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-93e6-account-create-update-k2rwx"] Dec 01 06:57:30 crc kubenswrapper[4632]: W1201 06:57:30.543745 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3c0bc241_1784_4738_9eb9_cba060f1d9d8.slice/crio-a8c3061de0799f113eed65cd5fede799835770daf2711e25e05bd2fff7f9b0bb WatchSource:0}: Error finding container a8c3061de0799f113eed65cd5fede799835770daf2711e25e05bd2fff7f9b0bb: Status 404 returned error can't find the container with id a8c3061de0799f113eed65cd5fede799835770daf2711e25e05bd2fff7f9b0bb Dec 01 06:57:30 crc kubenswrapper[4632]: I1201 06:57:30.632480 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-a527-account-create-update-26dwb"] Dec 01 06:57:30 crc kubenswrapper[4632]: I1201 06:57:30.638168 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-nsn2w"] Dec 01 06:57:30 crc kubenswrapper[4632]: I1201 06:57:30.721281 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-pgkg7"] Dec 01 06:57:30 crc kubenswrapper[4632]: W1201 06:57:30.801047 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod57240989_7936_48f2_9686_ef72e5dfa1a5.slice/crio-ac857bc32ba0fc72a9628704f5b75e0c4b3551e6123be73c4a410a7e59741f6c WatchSource:0}: Error finding container ac857bc32ba0fc72a9628704f5b75e0c4b3551e6123be73c4a410a7e59741f6c: Status 404 returned error can't find the container with id ac857bc32ba0fc72a9628704f5b75e0c4b3551e6123be73c4a410a7e59741f6c Dec 01 06:57:30 crc kubenswrapper[4632]: W1201 06:57:30.801710 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2d4743b6_ab1b_4792_a781_4849f8b13e94.slice/crio-e13cbf4a2293de6ee0af733eeb5dad345af806cc3c275861247ebde80beb140f WatchSource:0}: Error finding container e13cbf4a2293de6ee0af733eeb5dad345af806cc3c275861247ebde80beb140f: Status 404 returned error can't find the container with id e13cbf4a2293de6ee0af733eeb5dad345af806cc3c275861247ebde80beb140f Dec 01 06:57:30 crc kubenswrapper[4632]: W1201 06:57:30.827215 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod43c55cae_a062_4bf2_8649_fee844127588.slice/crio-22d14a3d02a386ebd4c0a4162455340bf6ddff790d03bc511f9607f3d7704231 WatchSource:0}: Error finding container 22d14a3d02a386ebd4c0a4162455340bf6ddff790d03bc511f9607f3d7704231: Status 404 returned error can't find the container with id 22d14a3d02a386ebd4c0a4162455340bf6ddff790d03bc511f9607f3d7704231 Dec 01 06:57:30 crc kubenswrapper[4632]: I1201 06:57:30.909831 4632 generic.go:334] "Generic (PLEG): container 
finished" podID="faaa590b-70fd-446c-a484-e932b8d2549a" containerID="ed629d560af6a946907e282a4d8b0a4ef2d9a6dfb6728cd16e6264e2626398e9" exitCode=0 Dec 01 06:57:30 crc kubenswrapper[4632]: I1201 06:57:30.909875 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-a828-account-create-update-wt9ph" event={"ID":"faaa590b-70fd-446c-a484-e932b8d2549a","Type":"ContainerDied","Data":"ed629d560af6a946907e282a4d8b0a4ef2d9a6dfb6728cd16e6264e2626398e9"} Dec 01 06:57:30 crc kubenswrapper[4632]: I1201 06:57:30.909927 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-a828-account-create-update-wt9ph" event={"ID":"faaa590b-70fd-446c-a484-e932b8d2549a","Type":"ContainerStarted","Data":"cf681c9e5f7176fa1a26669bb33e2122d1eb083fd30faea520054e80ccc8c0ef"} Dec 01 06:57:30 crc kubenswrapper[4632]: I1201 06:57:30.912378 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-pgkg7" event={"ID":"57240989-7936-48f2-9686-ef72e5dfa1a5","Type":"ContainerStarted","Data":"ac857bc32ba0fc72a9628704f5b75e0c4b3551e6123be73c4a410a7e59741f6c"} Dec 01 06:57:30 crc kubenswrapper[4632]: I1201 06:57:30.914257 4632 generic.go:334] "Generic (PLEG): container finished" podID="826e84c3-9339-4e50-9845-ca3dd1e9fc67" containerID="89bf2c33db503953d591a56506618d04a7b870c7338ec0e8892c1cd3a1062f94" exitCode=0 Dec 01 06:57:30 crc kubenswrapper[4632]: I1201 06:57:30.914324 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-dr5wg" event={"ID":"826e84c3-9339-4e50-9845-ca3dd1e9fc67","Type":"ContainerDied","Data":"89bf2c33db503953d591a56506618d04a7b870c7338ec0e8892c1cd3a1062f94"} Dec 01 06:57:30 crc kubenswrapper[4632]: I1201 06:57:30.914374 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-dr5wg" event={"ID":"826e84c3-9339-4e50-9845-ca3dd1e9fc67","Type":"ContainerStarted","Data":"f6e69ef6b24c5479be7fa0e9c0d5ac3578ed92164a39944229593dbb396be446"} Dec 01 06:57:30 crc kubenswrapper[4632]: I1201 06:57:30.915573 4632 generic.go:334] "Generic (PLEG): container finished" podID="15e859ff-6075-4487-a933-1e037cfa00d0" containerID="87539484663fff4cba89e750f3d9c33f4eda19407f661838a880c975a0ace6d8" exitCode=0 Dec 01 06:57:30 crc kubenswrapper[4632]: I1201 06:57:30.915618 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-b9m9r" event={"ID":"15e859ff-6075-4487-a933-1e037cfa00d0","Type":"ContainerDied","Data":"87539484663fff4cba89e750f3d9c33f4eda19407f661838a880c975a0ace6d8"} Dec 01 06:57:30 crc kubenswrapper[4632]: I1201 06:57:30.915635 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-b9m9r" event={"ID":"15e859ff-6075-4487-a933-1e037cfa00d0","Type":"ContainerStarted","Data":"cc8aa647820357a89e9837958c608a4b204ba435373aaaf56089a5e2a1cc3300"} Dec 01 06:57:30 crc kubenswrapper[4632]: I1201 06:57:30.918610 4632 generic.go:334] "Generic (PLEG): container finished" podID="cd88df45-3cae-489b-8811-23ca534c8b04" containerID="7a3daf33bdd92c7517e14c74c059cfa4a9163b6b1fb18da8874b00dd4296259a" exitCode=0 Dec 01 06:57:30 crc kubenswrapper[4632]: I1201 06:57:30.918652 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qpnjp" event={"ID":"cd88df45-3cae-489b-8811-23ca534c8b04","Type":"ContainerDied","Data":"7a3daf33bdd92c7517e14c74c059cfa4a9163b6b1fb18da8874b00dd4296259a"} Dec 01 06:57:30 crc kubenswrapper[4632]: I1201 06:57:30.931323 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/barbican-93e6-account-create-update-k2rwx" event={"ID":"3c0bc241-1784-4738-9eb9-cba060f1d9d8","Type":"ContainerStarted","Data":"a8c3061de0799f113eed65cd5fede799835770daf2711e25e05bd2fff7f9b0bb"} Dec 01 06:57:30 crc kubenswrapper[4632]: I1201 06:57:30.934268 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-nsn2w" event={"ID":"43c55cae-a062-4bf2-8649-fee844127588","Type":"ContainerStarted","Data":"22d14a3d02a386ebd4c0a4162455340bf6ddff790d03bc511f9607f3d7704231"} Dec 01 06:57:30 crc kubenswrapper[4632]: I1201 06:57:30.944013 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-a527-account-create-update-26dwb" event={"ID":"2d4743b6-ab1b-4792-a781-4849f8b13e94","Type":"ContainerStarted","Data":"e13cbf4a2293de6ee0af733eeb5dad345af806cc3c275861247ebde80beb140f"} Dec 01 06:57:30 crc kubenswrapper[4632]: I1201 06:57:30.944117 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rf8lj" podUID="fd8203a0-6352-4188-9f57-1cfaacddd93a" containerName="registry-server" containerID="cri-o://089fe112ddad57134db1f397c88f2b6672821a85476bb459581a4e6073b7db97" gracePeriod=2 Dec 01 06:57:30 crc kubenswrapper[4632]: I1201 06:57:30.978004 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-a527-account-create-update-26dwb" podStartSLOduration=4.977981115 podStartE2EDuration="4.977981115s" podCreationTimestamp="2025-12-01 06:57:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:57:30.975924023 +0000 UTC m=+860.540936996" watchObservedRunningTime="2025-12-01 06:57:30.977981115 +0000 UTC m=+860.542994087" Dec 01 06:57:30 crc kubenswrapper[4632]: I1201 06:57:30.998201 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-93e6-account-create-update-k2rwx" podStartSLOduration=4.998178516 podStartE2EDuration="4.998178516s" podCreationTimestamp="2025-12-01 06:57:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:57:30.991894051 +0000 UTC m=+860.556907034" watchObservedRunningTime="2025-12-01 06:57:30.998178516 +0000 UTC m=+860.563191489" Dec 01 06:57:31 crc kubenswrapper[4632]: I1201 06:57:31.859237 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rf8lj" Dec 01 06:57:31 crc kubenswrapper[4632]: I1201 06:57:31.904103 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd8203a0-6352-4188-9f57-1cfaacddd93a-catalog-content\") pod \"fd8203a0-6352-4188-9f57-1cfaacddd93a\" (UID: \"fd8203a0-6352-4188-9f57-1cfaacddd93a\") " Dec 01 06:57:31 crc kubenswrapper[4632]: I1201 06:57:31.904225 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd8203a0-6352-4188-9f57-1cfaacddd93a-utilities\") pod \"fd8203a0-6352-4188-9f57-1cfaacddd93a\" (UID: \"fd8203a0-6352-4188-9f57-1cfaacddd93a\") " Dec 01 06:57:31 crc kubenswrapper[4632]: I1201 06:57:31.904342 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tm2sf\" (UniqueName: \"kubernetes.io/projected/fd8203a0-6352-4188-9f57-1cfaacddd93a-kube-api-access-tm2sf\") pod \"fd8203a0-6352-4188-9f57-1cfaacddd93a\" (UID: \"fd8203a0-6352-4188-9f57-1cfaacddd93a\") " Dec 01 06:57:31 crc kubenswrapper[4632]: I1201 06:57:31.904940 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd8203a0-6352-4188-9f57-1cfaacddd93a-utilities" (OuterVolumeSpecName: "utilities") pod "fd8203a0-6352-4188-9f57-1cfaacddd93a" (UID: "fd8203a0-6352-4188-9f57-1cfaacddd93a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:57:31 crc kubenswrapper[4632]: I1201 06:57:31.910551 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd8203a0-6352-4188-9f57-1cfaacddd93a-kube-api-access-tm2sf" (OuterVolumeSpecName: "kube-api-access-tm2sf") pod "fd8203a0-6352-4188-9f57-1cfaacddd93a" (UID: "fd8203a0-6352-4188-9f57-1cfaacddd93a"). InnerVolumeSpecName "kube-api-access-tm2sf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:57:31 crc kubenswrapper[4632]: I1201 06:57:31.916815 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd8203a0-6352-4188-9f57-1cfaacddd93a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fd8203a0-6352-4188-9f57-1cfaacddd93a" (UID: "fd8203a0-6352-4188-9f57-1cfaacddd93a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:57:31 crc kubenswrapper[4632]: I1201 06:57:31.954129 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qpnjp" event={"ID":"cd88df45-3cae-489b-8811-23ca534c8b04","Type":"ContainerStarted","Data":"df1017d1d25186d54915df38d727e1ac5b0bb0fc2e17f35adba38f47d494a11b"} Dec 01 06:57:31 crc kubenswrapper[4632]: I1201 06:57:31.956162 4632 generic.go:334] "Generic (PLEG): container finished" podID="3c0bc241-1784-4738-9eb9-cba060f1d9d8" containerID="2da2a7ef1e02681148f3622763972f6fba2b72eaf684467e5d6752c822080119" exitCode=0 Dec 01 06:57:31 crc kubenswrapper[4632]: I1201 06:57:31.956239 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-93e6-account-create-update-k2rwx" event={"ID":"3c0bc241-1784-4738-9eb9-cba060f1d9d8","Type":"ContainerDied","Data":"2da2a7ef1e02681148f3622763972f6fba2b72eaf684467e5d6752c822080119"} Dec 01 06:57:31 crc kubenswrapper[4632]: I1201 06:57:31.957826 4632 generic.go:334] "Generic (PLEG): container finished" podID="43c55cae-a062-4bf2-8649-fee844127588" containerID="407d4134f2fb4d65cf02f7676c4ea3065d0b4ecb9507bd9a2300329cd709a788" exitCode=0 Dec 01 06:57:31 crc kubenswrapper[4632]: I1201 06:57:31.957895 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-nsn2w" event={"ID":"43c55cae-a062-4bf2-8649-fee844127588","Type":"ContainerDied","Data":"407d4134f2fb4d65cf02f7676c4ea3065d0b4ecb9507bd9a2300329cd709a788"} Dec 01 06:57:31 crc kubenswrapper[4632]: I1201 06:57:31.959661 4632 generic.go:334] "Generic (PLEG): container finished" podID="2d4743b6-ab1b-4792-a781-4849f8b13e94" containerID="c5fb34b21eac3eab41cc36fd135051a55ecabfa1a4de3bae5ac9d52606b1176f" exitCode=0 Dec 01 06:57:31 crc kubenswrapper[4632]: I1201 06:57:31.959742 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-a527-account-create-update-26dwb" event={"ID":"2d4743b6-ab1b-4792-a781-4849f8b13e94","Type":"ContainerDied","Data":"c5fb34b21eac3eab41cc36fd135051a55ecabfa1a4de3bae5ac9d52606b1176f"} Dec 01 06:57:31 crc kubenswrapper[4632]: I1201 06:57:31.963923 4632 generic.go:334] "Generic (PLEG): container finished" podID="fd8203a0-6352-4188-9f57-1cfaacddd93a" containerID="089fe112ddad57134db1f397c88f2b6672821a85476bb459581a4e6073b7db97" exitCode=0 Dec 01 06:57:31 crc kubenswrapper[4632]: I1201 06:57:31.963995 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rf8lj" Dec 01 06:57:31 crc kubenswrapper[4632]: I1201 06:57:31.963994 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rf8lj" event={"ID":"fd8203a0-6352-4188-9f57-1cfaacddd93a","Type":"ContainerDied","Data":"089fe112ddad57134db1f397c88f2b6672821a85476bb459581a4e6073b7db97"} Dec 01 06:57:31 crc kubenswrapper[4632]: I1201 06:57:31.964074 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rf8lj" event={"ID":"fd8203a0-6352-4188-9f57-1cfaacddd93a","Type":"ContainerDied","Data":"6c21165d50a106c21e5123bf82a591ee098e11455a9b50711430e2ee2c41539d"} Dec 01 06:57:31 crc kubenswrapper[4632]: I1201 06:57:31.964099 4632 scope.go:117] "RemoveContainer" containerID="089fe112ddad57134db1f397c88f2b6672821a85476bb459581a4e6073b7db97" Dec 01 06:57:31 crc kubenswrapper[4632]: I1201 06:57:31.979803 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qpnjp" podStartSLOduration=2.31487547 podStartE2EDuration="7.979787279s" podCreationTimestamp="2025-12-01 06:57:24 +0000 UTC" firstStartedPulling="2025-12-01 06:57:25.864909745 +0000 UTC m=+855.429922718" lastFinishedPulling="2025-12-01 06:57:31.529821544 +0000 UTC m=+861.094834527" observedRunningTime="2025-12-01 06:57:31.969193394 +0000 UTC m=+861.534206377" watchObservedRunningTime="2025-12-01 06:57:31.979787279 +0000 UTC m=+861.544800252" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.001966 4632 scope.go:117] "RemoveContainer" containerID="5931bacc3d7cd6937eb9dbdc546ecb9d764b6d93035c4890e8c42a535dda4eeb" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.013741 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd8203a0-6352-4188-9f57-1cfaacddd93a-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.013777 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tm2sf\" (UniqueName: \"kubernetes.io/projected/fd8203a0-6352-4188-9f57-1cfaacddd93a-kube-api-access-tm2sf\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.013791 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd8203a0-6352-4188-9f57-1cfaacddd93a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.034703 4632 scope.go:117] "RemoveContainer" containerID="16816266aef49d6e0b2743432a622c1b85b262f1055d2e94a7a2b64560d6b9f9" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.066001 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rf8lj"] Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.072716 4632 scope.go:117] "RemoveContainer" containerID="089fe112ddad57134db1f397c88f2b6672821a85476bb459581a4e6073b7db97" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.073061 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rf8lj"] Dec 01 06:57:32 crc kubenswrapper[4632]: E1201 06:57:32.073555 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"089fe112ddad57134db1f397c88f2b6672821a85476bb459581a4e6073b7db97\": container with ID starting with 
089fe112ddad57134db1f397c88f2b6672821a85476bb459581a4e6073b7db97 not found: ID does not exist" containerID="089fe112ddad57134db1f397c88f2b6672821a85476bb459581a4e6073b7db97" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.073612 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"089fe112ddad57134db1f397c88f2b6672821a85476bb459581a4e6073b7db97"} err="failed to get container status \"089fe112ddad57134db1f397c88f2b6672821a85476bb459581a4e6073b7db97\": rpc error: code = NotFound desc = could not find container \"089fe112ddad57134db1f397c88f2b6672821a85476bb459581a4e6073b7db97\": container with ID starting with 089fe112ddad57134db1f397c88f2b6672821a85476bb459581a4e6073b7db97 not found: ID does not exist" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.073641 4632 scope.go:117] "RemoveContainer" containerID="5931bacc3d7cd6937eb9dbdc546ecb9d764b6d93035c4890e8c42a535dda4eeb" Dec 01 06:57:32 crc kubenswrapper[4632]: E1201 06:57:32.074862 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5931bacc3d7cd6937eb9dbdc546ecb9d764b6d93035c4890e8c42a535dda4eeb\": container with ID starting with 5931bacc3d7cd6937eb9dbdc546ecb9d764b6d93035c4890e8c42a535dda4eeb not found: ID does not exist" containerID="5931bacc3d7cd6937eb9dbdc546ecb9d764b6d93035c4890e8c42a535dda4eeb" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.074904 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5931bacc3d7cd6937eb9dbdc546ecb9d764b6d93035c4890e8c42a535dda4eeb"} err="failed to get container status \"5931bacc3d7cd6937eb9dbdc546ecb9d764b6d93035c4890e8c42a535dda4eeb\": rpc error: code = NotFound desc = could not find container \"5931bacc3d7cd6937eb9dbdc546ecb9d764b6d93035c4890e8c42a535dda4eeb\": container with ID starting with 5931bacc3d7cd6937eb9dbdc546ecb9d764b6d93035c4890e8c42a535dda4eeb not found: ID does not exist" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.074931 4632 scope.go:117] "RemoveContainer" containerID="16816266aef49d6e0b2743432a622c1b85b262f1055d2e94a7a2b64560d6b9f9" Dec 01 06:57:32 crc kubenswrapper[4632]: E1201 06:57:32.076596 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16816266aef49d6e0b2743432a622c1b85b262f1055d2e94a7a2b64560d6b9f9\": container with ID starting with 16816266aef49d6e0b2743432a622c1b85b262f1055d2e94a7a2b64560d6b9f9 not found: ID does not exist" containerID="16816266aef49d6e0b2743432a622c1b85b262f1055d2e94a7a2b64560d6b9f9" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.076653 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16816266aef49d6e0b2743432a622c1b85b262f1055d2e94a7a2b64560d6b9f9"} err="failed to get container status \"16816266aef49d6e0b2743432a622c1b85b262f1055d2e94a7a2b64560d6b9f9\": rpc error: code = NotFound desc = could not find container \"16816266aef49d6e0b2743432a622c1b85b262f1055d2e94a7a2b64560d6b9f9\": container with ID starting with 16816266aef49d6e0b2743432a622c1b85b262f1055d2e94a7a2b64560d6b9f9 not found: ID does not exist" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.338365 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-b9m9r" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.420068 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pmv4k\" (UniqueName: \"kubernetes.io/projected/15e859ff-6075-4487-a933-1e037cfa00d0-kube-api-access-pmv4k\") pod \"15e859ff-6075-4487-a933-1e037cfa00d0\" (UID: \"15e859ff-6075-4487-a933-1e037cfa00d0\") " Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.420409 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/15e859ff-6075-4487-a933-1e037cfa00d0-operator-scripts\") pod \"15e859ff-6075-4487-a933-1e037cfa00d0\" (UID: \"15e859ff-6075-4487-a933-1e037cfa00d0\") " Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.421072 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/15e859ff-6075-4487-a933-1e037cfa00d0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "15e859ff-6075-4487-a933-1e037cfa00d0" (UID: "15e859ff-6075-4487-a933-1e037cfa00d0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.424276 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15e859ff-6075-4487-a933-1e037cfa00d0-kube-api-access-pmv4k" (OuterVolumeSpecName: "kube-api-access-pmv4k") pod "15e859ff-6075-4487-a933-1e037cfa00d0" (UID: "15e859ff-6075-4487-a933-1e037cfa00d0"). InnerVolumeSpecName "kube-api-access-pmv4k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.440143 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-a828-account-create-update-wt9ph" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.446677 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-dr5wg" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.513970 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.522140 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wf9cl\" (UniqueName: \"kubernetes.io/projected/826e84c3-9339-4e50-9845-ca3dd1e9fc67-kube-api-access-wf9cl\") pod \"826e84c3-9339-4e50-9845-ca3dd1e9fc67\" (UID: \"826e84c3-9339-4e50-9845-ca3dd1e9fc67\") " Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.522236 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-94gpp\" (UniqueName: \"kubernetes.io/projected/faaa590b-70fd-446c-a484-e932b8d2549a-kube-api-access-94gpp\") pod \"faaa590b-70fd-446c-a484-e932b8d2549a\" (UID: \"faaa590b-70fd-446c-a484-e932b8d2549a\") " Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.522408 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/826e84c3-9339-4e50-9845-ca3dd1e9fc67-operator-scripts\") pod \"826e84c3-9339-4e50-9845-ca3dd1e9fc67\" (UID: \"826e84c3-9339-4e50-9845-ca3dd1e9fc67\") " Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.522435 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/faaa590b-70fd-446c-a484-e932b8d2549a-operator-scripts\") pod \"faaa590b-70fd-446c-a484-e932b8d2549a\" (UID: \"faaa590b-70fd-446c-a484-e932b8d2549a\") " Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.522856 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pmv4k\" (UniqueName: \"kubernetes.io/projected/15e859ff-6075-4487-a933-1e037cfa00d0-kube-api-access-pmv4k\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.522877 4632 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/15e859ff-6075-4487-a933-1e037cfa00d0-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.522906 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/826e84c3-9339-4e50-9845-ca3dd1e9fc67-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "826e84c3-9339-4e50-9845-ca3dd1e9fc67" (UID: "826e84c3-9339-4e50-9845-ca3dd1e9fc67"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.523076 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/faaa590b-70fd-446c-a484-e932b8d2549a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "faaa590b-70fd-446c-a484-e932b8d2549a" (UID: "faaa590b-70fd-446c-a484-e932b8d2549a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.525135 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/826e84c3-9339-4e50-9845-ca3dd1e9fc67-kube-api-access-wf9cl" (OuterVolumeSpecName: "kube-api-access-wf9cl") pod "826e84c3-9339-4e50-9845-ca3dd1e9fc67" (UID: "826e84c3-9339-4e50-9845-ca3dd1e9fc67"). 
InnerVolumeSpecName "kube-api-access-wf9cl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.525587 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/faaa590b-70fd-446c-a484-e932b8d2549a-kube-api-access-94gpp" (OuterVolumeSpecName: "kube-api-access-94gpp") pod "faaa590b-70fd-446c-a484-e932b8d2549a" (UID: "faaa590b-70fd-446c-a484-e932b8d2549a"). InnerVolumeSpecName "kube-api-access-94gpp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.580089 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c9c9f7685-tp77b"] Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.580323 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b" podUID="64843eea-4a56-4384-bafe-1f16a3ca66c9" containerName="dnsmasq-dns" containerID="cri-o://9809ba73b2753cf1e2fea50db57cb2330f5b7ff6b6f796a223e64294f6d0991e" gracePeriod=10 Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.624970 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wf9cl\" (UniqueName: \"kubernetes.io/projected/826e84c3-9339-4e50-9845-ca3dd1e9fc67-kube-api-access-wf9cl\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.625010 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-94gpp\" (UniqueName: \"kubernetes.io/projected/faaa590b-70fd-446c-a484-e932b8d2549a-kube-api-access-94gpp\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.625020 4632 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/826e84c3-9339-4e50-9845-ca3dd1e9fc67-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.625030 4632 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/faaa590b-70fd-446c-a484-e932b8d2549a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.760923 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd8203a0-6352-4188-9f57-1cfaacddd93a" path="/var/lib/kubelet/pods/fd8203a0-6352-4188-9f57-1cfaacddd93a/volumes" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.939035 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.976958 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-a828-account-create-update-wt9ph" event={"ID":"faaa590b-70fd-446c-a484-e932b8d2549a","Type":"ContainerDied","Data":"cf681c9e5f7176fa1a26669bb33e2122d1eb083fd30faea520054e80ccc8c0ef"} Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.977015 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cf681c9e5f7176fa1a26669bb33e2122d1eb083fd30faea520054e80ccc8c0ef" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.977073 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-a828-account-create-update-wt9ph" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.980728 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-dr5wg" event={"ID":"826e84c3-9339-4e50-9845-ca3dd1e9fc67","Type":"ContainerDied","Data":"f6e69ef6b24c5479be7fa0e9c0d5ac3578ed92164a39944229593dbb396be446"} Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.980758 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f6e69ef6b24c5479be7fa0e9c0d5ac3578ed92164a39944229593dbb396be446" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.980800 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-dr5wg" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.985056 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-b9m9r" event={"ID":"15e859ff-6075-4487-a933-1e037cfa00d0","Type":"ContainerDied","Data":"cc8aa647820357a89e9837958c608a4b204ba435373aaaf56089a5e2a1cc3300"} Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.985100 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cc8aa647820357a89e9837958c608a4b204ba435373aaaf56089a5e2a1cc3300" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.985139 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-b9m9r" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.992897 4632 generic.go:334] "Generic (PLEG): container finished" podID="64843eea-4a56-4384-bafe-1f16a3ca66c9" containerID="9809ba73b2753cf1e2fea50db57cb2330f5b7ff6b6f796a223e64294f6d0991e" exitCode=0 Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.992961 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b" Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.993082 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b" event={"ID":"64843eea-4a56-4384-bafe-1f16a3ca66c9","Type":"ContainerDied","Data":"9809ba73b2753cf1e2fea50db57cb2330f5b7ff6b6f796a223e64294f6d0991e"} Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.993296 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c9c9f7685-tp77b" event={"ID":"64843eea-4a56-4384-bafe-1f16a3ca66c9","Type":"ContainerDied","Data":"6e2bfdceaafed3353647a7dcb252ccd4e1965d86b66b3439b33363f24a856c16"} Dec 01 06:57:32 crc kubenswrapper[4632]: I1201 06:57:32.993368 4632 scope.go:117] "RemoveContainer" containerID="9809ba73b2753cf1e2fea50db57cb2330f5b7ff6b6f796a223e64294f6d0991e" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.016415 4632 scope.go:117] "RemoveContainer" containerID="b0b768f9692e21bbb6539f6446da06187420c98b3edba652845d55afb7d3fa83" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.035294 4632 scope.go:117] "RemoveContainer" containerID="9809ba73b2753cf1e2fea50db57cb2330f5b7ff6b6f796a223e64294f6d0991e" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.037221 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64843eea-4a56-4384-bafe-1f16a3ca66c9-ovsdbserver-nb\") pod \"64843eea-4a56-4384-bafe-1f16a3ca66c9\" (UID: \"64843eea-4a56-4384-bafe-1f16a3ca66c9\") " Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.037270 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n9g2f\" (UniqueName: \"kubernetes.io/projected/64843eea-4a56-4384-bafe-1f16a3ca66c9-kube-api-access-n9g2f\") pod \"64843eea-4a56-4384-bafe-1f16a3ca66c9\" (UID: \"64843eea-4a56-4384-bafe-1f16a3ca66c9\") " Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.037381 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/64843eea-4a56-4384-bafe-1f16a3ca66c9-ovsdbserver-sb\") pod \"64843eea-4a56-4384-bafe-1f16a3ca66c9\" (UID: \"64843eea-4a56-4384-bafe-1f16a3ca66c9\") " Dec 01 06:57:33 crc kubenswrapper[4632]: E1201 06:57:33.037402 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9809ba73b2753cf1e2fea50db57cb2330f5b7ff6b6f796a223e64294f6d0991e\": container with ID starting with 9809ba73b2753cf1e2fea50db57cb2330f5b7ff6b6f796a223e64294f6d0991e not found: ID does not exist" containerID="9809ba73b2753cf1e2fea50db57cb2330f5b7ff6b6f796a223e64294f6d0991e" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.037442 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9809ba73b2753cf1e2fea50db57cb2330f5b7ff6b6f796a223e64294f6d0991e"} err="failed to get container status \"9809ba73b2753cf1e2fea50db57cb2330f5b7ff6b6f796a223e64294f6d0991e\": rpc error: code = NotFound desc = could not find container \"9809ba73b2753cf1e2fea50db57cb2330f5b7ff6b6f796a223e64294f6d0991e\": container with ID starting with 9809ba73b2753cf1e2fea50db57cb2330f5b7ff6b6f796a223e64294f6d0991e not found: ID does not exist" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.037471 4632 scope.go:117] "RemoveContainer" containerID="b0b768f9692e21bbb6539f6446da06187420c98b3edba652845d55afb7d3fa83" Dec 01 
06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.037572 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64843eea-4a56-4384-bafe-1f16a3ca66c9-config\") pod \"64843eea-4a56-4384-bafe-1f16a3ca66c9\" (UID: \"64843eea-4a56-4384-bafe-1f16a3ca66c9\") " Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.037595 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64843eea-4a56-4384-bafe-1f16a3ca66c9-dns-svc\") pod \"64843eea-4a56-4384-bafe-1f16a3ca66c9\" (UID: \"64843eea-4a56-4384-bafe-1f16a3ca66c9\") " Dec 01 06:57:33 crc kubenswrapper[4632]: E1201 06:57:33.040857 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0b768f9692e21bbb6539f6446da06187420c98b3edba652845d55afb7d3fa83\": container with ID starting with b0b768f9692e21bbb6539f6446da06187420c98b3edba652845d55afb7d3fa83 not found: ID does not exist" containerID="b0b768f9692e21bbb6539f6446da06187420c98b3edba652845d55afb7d3fa83" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.040930 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0b768f9692e21bbb6539f6446da06187420c98b3edba652845d55afb7d3fa83"} err="failed to get container status \"b0b768f9692e21bbb6539f6446da06187420c98b3edba652845d55afb7d3fa83\": rpc error: code = NotFound desc = could not find container \"b0b768f9692e21bbb6539f6446da06187420c98b3edba652845d55afb7d3fa83\": container with ID starting with b0b768f9692e21bbb6539f6446da06187420c98b3edba652845d55afb7d3fa83 not found: ID does not exist" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.044845 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64843eea-4a56-4384-bafe-1f16a3ca66c9-kube-api-access-n9g2f" (OuterVolumeSpecName: "kube-api-access-n9g2f") pod "64843eea-4a56-4384-bafe-1f16a3ca66c9" (UID: "64843eea-4a56-4384-bafe-1f16a3ca66c9"). InnerVolumeSpecName "kube-api-access-n9g2f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.095683 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64843eea-4a56-4384-bafe-1f16a3ca66c9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "64843eea-4a56-4384-bafe-1f16a3ca66c9" (UID: "64843eea-4a56-4384-bafe-1f16a3ca66c9"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.106850 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64843eea-4a56-4384-bafe-1f16a3ca66c9-config" (OuterVolumeSpecName: "config") pod "64843eea-4a56-4384-bafe-1f16a3ca66c9" (UID: "64843eea-4a56-4384-bafe-1f16a3ca66c9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.107377 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64843eea-4a56-4384-bafe-1f16a3ca66c9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "64843eea-4a56-4384-bafe-1f16a3ca66c9" (UID: "64843eea-4a56-4384-bafe-1f16a3ca66c9"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.112763 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64843eea-4a56-4384-bafe-1f16a3ca66c9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "64843eea-4a56-4384-bafe-1f16a3ca66c9" (UID: "64843eea-4a56-4384-bafe-1f16a3ca66c9"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.139610 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64843eea-4a56-4384-bafe-1f16a3ca66c9-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.139636 4632 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/64843eea-4a56-4384-bafe-1f16a3ca66c9-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.139646 4632 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/64843eea-4a56-4384-bafe-1f16a3ca66c9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.139658 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n9g2f\" (UniqueName: \"kubernetes.io/projected/64843eea-4a56-4384-bafe-1f16a3ca66c9-kube-api-access-n9g2f\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.139667 4632 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/64843eea-4a56-4384-bafe-1f16a3ca66c9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.336530 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c9c9f7685-tp77b"] Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.342206 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7c9c9f7685-tp77b"] Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.413145 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-nsn2w" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.443616 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43c55cae-a062-4bf2-8649-fee844127588-operator-scripts\") pod \"43c55cae-a062-4bf2-8649-fee844127588\" (UID: \"43c55cae-a062-4bf2-8649-fee844127588\") " Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.443652 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4h9l\" (UniqueName: \"kubernetes.io/projected/43c55cae-a062-4bf2-8649-fee844127588-kube-api-access-d4h9l\") pod \"43c55cae-a062-4bf2-8649-fee844127588\" (UID: \"43c55cae-a062-4bf2-8649-fee844127588\") " Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.444321 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43c55cae-a062-4bf2-8649-fee844127588-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "43c55cae-a062-4bf2-8649-fee844127588" (UID: "43c55cae-a062-4bf2-8649-fee844127588"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.448244 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43c55cae-a062-4bf2-8649-fee844127588-kube-api-access-d4h9l" (OuterVolumeSpecName: "kube-api-access-d4h9l") pod "43c55cae-a062-4bf2-8649-fee844127588" (UID: "43c55cae-a062-4bf2-8649-fee844127588"). InnerVolumeSpecName "kube-api-access-d4h9l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.474833 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-93e6-account-create-update-k2rwx" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.482628 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-a527-account-create-update-26dwb" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.545880 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d4743b6-ab1b-4792-a781-4849f8b13e94-operator-scripts\") pod \"2d4743b6-ab1b-4792-a781-4849f8b13e94\" (UID: \"2d4743b6-ab1b-4792-a781-4849f8b13e94\") " Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.546006 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2kd5\" (UniqueName: \"kubernetes.io/projected/3c0bc241-1784-4738-9eb9-cba060f1d9d8-kube-api-access-h2kd5\") pod \"3c0bc241-1784-4738-9eb9-cba060f1d9d8\" (UID: \"3c0bc241-1784-4738-9eb9-cba060f1d9d8\") " Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.546051 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s6gsj\" (UniqueName: \"kubernetes.io/projected/2d4743b6-ab1b-4792-a781-4849f8b13e94-kube-api-access-s6gsj\") pod \"2d4743b6-ab1b-4792-a781-4849f8b13e94\" (UID: \"2d4743b6-ab1b-4792-a781-4849f8b13e94\") " Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.546161 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c0bc241-1784-4738-9eb9-cba060f1d9d8-operator-scripts\") pod \"3c0bc241-1784-4738-9eb9-cba060f1d9d8\" (UID: \"3c0bc241-1784-4738-9eb9-cba060f1d9d8\") " Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.546660 4632 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43c55cae-a062-4bf2-8649-fee844127588-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.546695 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4h9l\" (UniqueName: \"kubernetes.io/projected/43c55cae-a062-4bf2-8649-fee844127588-kube-api-access-d4h9l\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.546920 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c0bc241-1784-4738-9eb9-cba060f1d9d8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3c0bc241-1784-4738-9eb9-cba060f1d9d8" (UID: "3c0bc241-1784-4738-9eb9-cba060f1d9d8"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.547241 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d4743b6-ab1b-4792-a781-4849f8b13e94-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2d4743b6-ab1b-4792-a781-4849f8b13e94" (UID: "2d4743b6-ab1b-4792-a781-4849f8b13e94"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.550859 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c0bc241-1784-4738-9eb9-cba060f1d9d8-kube-api-access-h2kd5" (OuterVolumeSpecName: "kube-api-access-h2kd5") pod "3c0bc241-1784-4738-9eb9-cba060f1d9d8" (UID: "3c0bc241-1784-4738-9eb9-cba060f1d9d8"). InnerVolumeSpecName "kube-api-access-h2kd5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.552291 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d4743b6-ab1b-4792-a781-4849f8b13e94-kube-api-access-s6gsj" (OuterVolumeSpecName: "kube-api-access-s6gsj") pod "2d4743b6-ab1b-4792-a781-4849f8b13e94" (UID: "2d4743b6-ab1b-4792-a781-4849f8b13e94"). InnerVolumeSpecName "kube-api-access-s6gsj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.650488 4632 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c0bc241-1784-4738-9eb9-cba060f1d9d8-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.650529 4632 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d4743b6-ab1b-4792-a781-4849f8b13e94-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.650540 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2kd5\" (UniqueName: \"kubernetes.io/projected/3c0bc241-1784-4738-9eb9-cba060f1d9d8-kube-api-access-h2kd5\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:33 crc kubenswrapper[4632]: I1201 06:57:33.650552 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s6gsj\" (UniqueName: \"kubernetes.io/projected/2d4743b6-ab1b-4792-a781-4849f8b13e94-kube-api-access-s6gsj\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:34 crc kubenswrapper[4632]: I1201 06:57:34.011147 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-93e6-account-create-update-k2rwx" event={"ID":"3c0bc241-1784-4738-9eb9-cba060f1d9d8","Type":"ContainerDied","Data":"a8c3061de0799f113eed65cd5fede799835770daf2711e25e05bd2fff7f9b0bb"} Dec 01 06:57:34 crc kubenswrapper[4632]: I1201 06:57:34.011193 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a8c3061de0799f113eed65cd5fede799835770daf2711e25e05bd2fff7f9b0bb" Dec 01 06:57:34 crc kubenswrapper[4632]: I1201 06:57:34.011248 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-93e6-account-create-update-k2rwx" Dec 01 06:57:34 crc kubenswrapper[4632]: I1201 06:57:34.015442 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-nsn2w" Dec 01 06:57:34 crc kubenswrapper[4632]: I1201 06:57:34.015456 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-nsn2w" event={"ID":"43c55cae-a062-4bf2-8649-fee844127588","Type":"ContainerDied","Data":"22d14a3d02a386ebd4c0a4162455340bf6ddff790d03bc511f9607f3d7704231"} Dec 01 06:57:34 crc kubenswrapper[4632]: I1201 06:57:34.015509 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22d14a3d02a386ebd4c0a4162455340bf6ddff790d03bc511f9607f3d7704231" Dec 01 06:57:34 crc kubenswrapper[4632]: I1201 06:57:34.017158 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-a527-account-create-update-26dwb" event={"ID":"2d4743b6-ab1b-4792-a781-4849f8b13e94","Type":"ContainerDied","Data":"e13cbf4a2293de6ee0af733eeb5dad345af806cc3c275861247ebde80beb140f"} Dec 01 06:57:34 crc kubenswrapper[4632]: I1201 06:57:34.017194 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e13cbf4a2293de6ee0af733eeb5dad345af806cc3c275861247ebde80beb140f" Dec 01 06:57:34 crc kubenswrapper[4632]: I1201 06:57:34.017265 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-a527-account-create-update-26dwb" Dec 01 06:57:34 crc kubenswrapper[4632]: I1201 06:57:34.760133 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64843eea-4a56-4384-bafe-1f16a3ca66c9" path="/var/lib/kubelet/pods/64843eea-4a56-4384-bafe-1f16a3ca66c9/volumes" Dec 01 06:57:34 crc kubenswrapper[4632]: I1201 06:57:34.791869 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qpnjp" Dec 01 06:57:34 crc kubenswrapper[4632]: I1201 06:57:34.792100 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qpnjp" Dec 01 06:57:34 crc kubenswrapper[4632]: I1201 06:57:34.834704 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qpnjp" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.032339 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-pgkg7" event={"ID":"57240989-7936-48f2-9686-ef72e5dfa1a5","Type":"ContainerStarted","Data":"69d68e096c31bb52b93885c0077c4cbaefa6791d617dee7fd5debdb1f5131dc8"} Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.054375 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-pgkg7" podStartSLOduration=5.251943991 podStartE2EDuration="10.054345766s" podCreationTimestamp="2025-12-01 06:57:26 +0000 UTC" firstStartedPulling="2025-12-01 06:57:30.804179359 +0000 UTC m=+860.369192332" lastFinishedPulling="2025-12-01 06:57:35.606581133 +0000 UTC m=+865.171594107" observedRunningTime="2025-12-01 06:57:36.044221187 +0000 UTC m=+865.609234160" watchObservedRunningTime="2025-12-01 06:57:36.054345766 +0000 UTC m=+865.619358739" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.070538 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qpnjp" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.244137 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-dnt67"] Dec 01 06:57:36 crc kubenswrapper[4632]: E1201 06:57:36.244610 4632 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="2d4743b6-ab1b-4792-a781-4849f8b13e94" containerName="mariadb-account-create-update" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.244634 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d4743b6-ab1b-4792-a781-4849f8b13e94" containerName="mariadb-account-create-update" Dec 01 06:57:36 crc kubenswrapper[4632]: E1201 06:57:36.244654 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd8203a0-6352-4188-9f57-1cfaacddd93a" containerName="registry-server" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.244661 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd8203a0-6352-4188-9f57-1cfaacddd93a" containerName="registry-server" Dec 01 06:57:36 crc kubenswrapper[4632]: E1201 06:57:36.244670 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="826e84c3-9339-4e50-9845-ca3dd1e9fc67" containerName="mariadb-database-create" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.244676 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="826e84c3-9339-4e50-9845-ca3dd1e9fc67" containerName="mariadb-database-create" Dec 01 06:57:36 crc kubenswrapper[4632]: E1201 06:57:36.244692 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faaa590b-70fd-446c-a484-e932b8d2549a" containerName="mariadb-account-create-update" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.244698 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="faaa590b-70fd-446c-a484-e932b8d2549a" containerName="mariadb-account-create-update" Dec 01 06:57:36 crc kubenswrapper[4632]: E1201 06:57:36.244708 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64843eea-4a56-4384-bafe-1f16a3ca66c9" containerName="init" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.244714 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="64843eea-4a56-4384-bafe-1f16a3ca66c9" containerName="init" Dec 01 06:57:36 crc kubenswrapper[4632]: E1201 06:57:36.244722 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15e859ff-6075-4487-a933-1e037cfa00d0" containerName="mariadb-database-create" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.244728 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="15e859ff-6075-4487-a933-1e037cfa00d0" containerName="mariadb-database-create" Dec 01 06:57:36 crc kubenswrapper[4632]: E1201 06:57:36.244744 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd8203a0-6352-4188-9f57-1cfaacddd93a" containerName="extract-utilities" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.244750 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd8203a0-6352-4188-9f57-1cfaacddd93a" containerName="extract-utilities" Dec 01 06:57:36 crc kubenswrapper[4632]: E1201 06:57:36.244766 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd8203a0-6352-4188-9f57-1cfaacddd93a" containerName="extract-content" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.244773 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd8203a0-6352-4188-9f57-1cfaacddd93a" containerName="extract-content" Dec 01 06:57:36 crc kubenswrapper[4632]: E1201 06:57:36.244787 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43c55cae-a062-4bf2-8649-fee844127588" containerName="mariadb-database-create" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.244794 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="43c55cae-a062-4bf2-8649-fee844127588" containerName="mariadb-database-create" Dec 01 
06:57:36 crc kubenswrapper[4632]: E1201 06:57:36.244803 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c0bc241-1784-4738-9eb9-cba060f1d9d8" containerName="mariadb-account-create-update" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.244808 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c0bc241-1784-4738-9eb9-cba060f1d9d8" containerName="mariadb-account-create-update" Dec 01 06:57:36 crc kubenswrapper[4632]: E1201 06:57:36.244817 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64843eea-4a56-4384-bafe-1f16a3ca66c9" containerName="dnsmasq-dns" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.244822 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="64843eea-4a56-4384-bafe-1f16a3ca66c9" containerName="dnsmasq-dns" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.245030 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c0bc241-1784-4738-9eb9-cba060f1d9d8" containerName="mariadb-account-create-update" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.245045 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd8203a0-6352-4188-9f57-1cfaacddd93a" containerName="registry-server" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.245062 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d4743b6-ab1b-4792-a781-4849f8b13e94" containerName="mariadb-account-create-update" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.245071 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="64843eea-4a56-4384-bafe-1f16a3ca66c9" containerName="dnsmasq-dns" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.245078 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="faaa590b-70fd-446c-a484-e932b8d2549a" containerName="mariadb-account-create-update" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.245094 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="826e84c3-9339-4e50-9845-ca3dd1e9fc67" containerName="mariadb-database-create" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.245103 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="15e859ff-6075-4487-a933-1e037cfa00d0" containerName="mariadb-database-create" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.245109 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="43c55cae-a062-4bf2-8649-fee844127588" containerName="mariadb-database-create" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.246428 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dnt67" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.265754 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dnt67"] Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.311075 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gznh7\" (UniqueName: \"kubernetes.io/projected/1ce86ad4-7718-4227-a97b-2e9f2dc3bc27-kube-api-access-gznh7\") pod \"redhat-operators-dnt67\" (UID: \"1ce86ad4-7718-4227-a97b-2e9f2dc3bc27\") " pod="openshift-marketplace/redhat-operators-dnt67" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.311203 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ce86ad4-7718-4227-a97b-2e9f2dc3bc27-catalog-content\") pod \"redhat-operators-dnt67\" (UID: \"1ce86ad4-7718-4227-a97b-2e9f2dc3bc27\") " pod="openshift-marketplace/redhat-operators-dnt67" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.311263 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ce86ad4-7718-4227-a97b-2e9f2dc3bc27-utilities\") pod \"redhat-operators-dnt67\" (UID: \"1ce86ad4-7718-4227-a97b-2e9f2dc3bc27\") " pod="openshift-marketplace/redhat-operators-dnt67" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.412718 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gznh7\" (UniqueName: \"kubernetes.io/projected/1ce86ad4-7718-4227-a97b-2e9f2dc3bc27-kube-api-access-gznh7\") pod \"redhat-operators-dnt67\" (UID: \"1ce86ad4-7718-4227-a97b-2e9f2dc3bc27\") " pod="openshift-marketplace/redhat-operators-dnt67" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.412882 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ce86ad4-7718-4227-a97b-2e9f2dc3bc27-catalog-content\") pod \"redhat-operators-dnt67\" (UID: \"1ce86ad4-7718-4227-a97b-2e9f2dc3bc27\") " pod="openshift-marketplace/redhat-operators-dnt67" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.412976 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ce86ad4-7718-4227-a97b-2e9f2dc3bc27-utilities\") pod \"redhat-operators-dnt67\" (UID: \"1ce86ad4-7718-4227-a97b-2e9f2dc3bc27\") " pod="openshift-marketplace/redhat-operators-dnt67" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.413500 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ce86ad4-7718-4227-a97b-2e9f2dc3bc27-catalog-content\") pod \"redhat-operators-dnt67\" (UID: \"1ce86ad4-7718-4227-a97b-2e9f2dc3bc27\") " pod="openshift-marketplace/redhat-operators-dnt67" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.413524 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ce86ad4-7718-4227-a97b-2e9f2dc3bc27-utilities\") pod \"redhat-operators-dnt67\" (UID: \"1ce86ad4-7718-4227-a97b-2e9f2dc3bc27\") " pod="openshift-marketplace/redhat-operators-dnt67" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.427936 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-gznh7\" (UniqueName: \"kubernetes.io/projected/1ce86ad4-7718-4227-a97b-2e9f2dc3bc27-kube-api-access-gznh7\") pod \"redhat-operators-dnt67\" (UID: \"1ce86ad4-7718-4227-a97b-2e9f2dc3bc27\") " pod="openshift-marketplace/redhat-operators-dnt67" Dec 01 06:57:36 crc kubenswrapper[4632]: I1201 06:57:36.567190 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dnt67" Dec 01 06:57:37 crc kubenswrapper[4632]: I1201 06:57:36.991247 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dnt67"] Dec 01 06:57:37 crc kubenswrapper[4632]: W1201 06:57:36.993718 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1ce86ad4_7718_4227_a97b_2e9f2dc3bc27.slice/crio-f3a6c8e73983a621c5120688f8d207a827bfe4efeb75f8cbe1cce5abe3019b4f WatchSource:0}: Error finding container f3a6c8e73983a621c5120688f8d207a827bfe4efeb75f8cbe1cce5abe3019b4f: Status 404 returned error can't find the container with id f3a6c8e73983a621c5120688f8d207a827bfe4efeb75f8cbe1cce5abe3019b4f Dec 01 06:57:37 crc kubenswrapper[4632]: I1201 06:57:37.040525 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dnt67" event={"ID":"1ce86ad4-7718-4227-a97b-2e9f2dc3bc27","Type":"ContainerStarted","Data":"f3a6c8e73983a621c5120688f8d207a827bfe4efeb75f8cbe1cce5abe3019b4f"} Dec 01 06:57:38 crc kubenswrapper[4632]: I1201 06:57:38.050100 4632 generic.go:334] "Generic (PLEG): container finished" podID="57240989-7936-48f2-9686-ef72e5dfa1a5" containerID="69d68e096c31bb52b93885c0077c4cbaefa6791d617dee7fd5debdb1f5131dc8" exitCode=0 Dec 01 06:57:38 crc kubenswrapper[4632]: I1201 06:57:38.050196 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-pgkg7" event={"ID":"57240989-7936-48f2-9686-ef72e5dfa1a5","Type":"ContainerDied","Data":"69d68e096c31bb52b93885c0077c4cbaefa6791d617dee7fd5debdb1f5131dc8"} Dec 01 06:57:38 crc kubenswrapper[4632]: I1201 06:57:38.053487 4632 generic.go:334] "Generic (PLEG): container finished" podID="1ce86ad4-7718-4227-a97b-2e9f2dc3bc27" containerID="999da89cd0ca1508b89d3ef5319f13a1cd8d61e5c62d4f0ad1debc7f1fe5f283" exitCode=0 Dec 01 06:57:38 crc kubenswrapper[4632]: I1201 06:57:38.053545 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dnt67" event={"ID":"1ce86ad4-7718-4227-a97b-2e9f2dc3bc27","Type":"ContainerDied","Data":"999da89cd0ca1508b89d3ef5319f13a1cd8d61e5c62d4f0ad1debc7f1fe5f283"} Dec 01 06:57:38 crc kubenswrapper[4632]: I1201 06:57:38.441824 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qpnjp"] Dec 01 06:57:38 crc kubenswrapper[4632]: I1201 06:57:38.442076 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qpnjp" podUID="cd88df45-3cae-489b-8811-23ca534c8b04" containerName="registry-server" containerID="cri-o://df1017d1d25186d54915df38d727e1ac5b0bb0fc2e17f35adba38f47d494a11b" gracePeriod=2 Dec 01 06:57:38 crc kubenswrapper[4632]: I1201 06:57:38.835252 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qpnjp" Dec 01 06:57:38 crc kubenswrapper[4632]: I1201 06:57:38.864369 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd88df45-3cae-489b-8811-23ca534c8b04-utilities\") pod \"cd88df45-3cae-489b-8811-23ca534c8b04\" (UID: \"cd88df45-3cae-489b-8811-23ca534c8b04\") " Dec 01 06:57:38 crc kubenswrapper[4632]: I1201 06:57:38.864676 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cl6s7\" (UniqueName: \"kubernetes.io/projected/cd88df45-3cae-489b-8811-23ca534c8b04-kube-api-access-cl6s7\") pod \"cd88df45-3cae-489b-8811-23ca534c8b04\" (UID: \"cd88df45-3cae-489b-8811-23ca534c8b04\") " Dec 01 06:57:38 crc kubenswrapper[4632]: I1201 06:57:38.864794 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd88df45-3cae-489b-8811-23ca534c8b04-catalog-content\") pod \"cd88df45-3cae-489b-8811-23ca534c8b04\" (UID: \"cd88df45-3cae-489b-8811-23ca534c8b04\") " Dec 01 06:57:38 crc kubenswrapper[4632]: I1201 06:57:38.866056 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd88df45-3cae-489b-8811-23ca534c8b04-utilities" (OuterVolumeSpecName: "utilities") pod "cd88df45-3cae-489b-8811-23ca534c8b04" (UID: "cd88df45-3cae-489b-8811-23ca534c8b04"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:57:38 crc kubenswrapper[4632]: I1201 06:57:38.872477 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd88df45-3cae-489b-8811-23ca534c8b04-kube-api-access-cl6s7" (OuterVolumeSpecName: "kube-api-access-cl6s7") pod "cd88df45-3cae-489b-8811-23ca534c8b04" (UID: "cd88df45-3cae-489b-8811-23ca534c8b04"). InnerVolumeSpecName "kube-api-access-cl6s7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:57:38 crc kubenswrapper[4632]: I1201 06:57:38.912666 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd88df45-3cae-489b-8811-23ca534c8b04-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cd88df45-3cae-489b-8811-23ca534c8b04" (UID: "cd88df45-3cae-489b-8811-23ca534c8b04"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:57:38 crc kubenswrapper[4632]: I1201 06:57:38.967552 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd88df45-3cae-489b-8811-23ca534c8b04-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:38 crc kubenswrapper[4632]: I1201 06:57:38.967585 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd88df45-3cae-489b-8811-23ca534c8b04-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:38 crc kubenswrapper[4632]: I1201 06:57:38.967595 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cl6s7\" (UniqueName: \"kubernetes.io/projected/cd88df45-3cae-489b-8811-23ca534c8b04-kube-api-access-cl6s7\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:39 crc kubenswrapper[4632]: I1201 06:57:39.064045 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dnt67" event={"ID":"1ce86ad4-7718-4227-a97b-2e9f2dc3bc27","Type":"ContainerStarted","Data":"3184f860516bb772e0587f57f1ea878a37a69bdfd92f36a71c6e79e8fe633995"} Dec 01 06:57:39 crc kubenswrapper[4632]: I1201 06:57:39.066478 4632 generic.go:334] "Generic (PLEG): container finished" podID="cd88df45-3cae-489b-8811-23ca534c8b04" containerID="df1017d1d25186d54915df38d727e1ac5b0bb0fc2e17f35adba38f47d494a11b" exitCode=0 Dec 01 06:57:39 crc kubenswrapper[4632]: I1201 06:57:39.066532 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qpnjp" Dec 01 06:57:39 crc kubenswrapper[4632]: I1201 06:57:39.066575 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qpnjp" event={"ID":"cd88df45-3cae-489b-8811-23ca534c8b04","Type":"ContainerDied","Data":"df1017d1d25186d54915df38d727e1ac5b0bb0fc2e17f35adba38f47d494a11b"} Dec 01 06:57:39 crc kubenswrapper[4632]: I1201 06:57:39.066608 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qpnjp" event={"ID":"cd88df45-3cae-489b-8811-23ca534c8b04","Type":"ContainerDied","Data":"aac6b1667239f9bb1c26ef2c2ea182771c358e3c9d9b83b54413a3a8d165cce5"} Dec 01 06:57:39 crc kubenswrapper[4632]: I1201 06:57:39.066638 4632 scope.go:117] "RemoveContainer" containerID="df1017d1d25186d54915df38d727e1ac5b0bb0fc2e17f35adba38f47d494a11b" Dec 01 06:57:39 crc kubenswrapper[4632]: I1201 06:57:39.088552 4632 scope.go:117] "RemoveContainer" containerID="7a3daf33bdd92c7517e14c74c059cfa4a9163b6b1fb18da8874b00dd4296259a" Dec 01 06:57:39 crc kubenswrapper[4632]: I1201 06:57:39.104572 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qpnjp"] Dec 01 06:57:39 crc kubenswrapper[4632]: I1201 06:57:39.109468 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qpnjp"] Dec 01 06:57:39 crc kubenswrapper[4632]: I1201 06:57:39.129295 4632 scope.go:117] "RemoveContainer" containerID="f1ec582db171720c35aa92edf7e83e01f5cde4f3d3262f8f86f41bda1289a94f" Dec 01 06:57:39 crc kubenswrapper[4632]: I1201 06:57:39.154228 4632 scope.go:117] "RemoveContainer" containerID="df1017d1d25186d54915df38d727e1ac5b0bb0fc2e17f35adba38f47d494a11b" Dec 01 06:57:39 crc kubenswrapper[4632]: E1201 06:57:39.154635 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"df1017d1d25186d54915df38d727e1ac5b0bb0fc2e17f35adba38f47d494a11b\": container with ID starting with df1017d1d25186d54915df38d727e1ac5b0bb0fc2e17f35adba38f47d494a11b not found: ID does not exist" containerID="df1017d1d25186d54915df38d727e1ac5b0bb0fc2e17f35adba38f47d494a11b" Dec 01 06:57:39 crc kubenswrapper[4632]: I1201 06:57:39.154672 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df1017d1d25186d54915df38d727e1ac5b0bb0fc2e17f35adba38f47d494a11b"} err="failed to get container status \"df1017d1d25186d54915df38d727e1ac5b0bb0fc2e17f35adba38f47d494a11b\": rpc error: code = NotFound desc = could not find container \"df1017d1d25186d54915df38d727e1ac5b0bb0fc2e17f35adba38f47d494a11b\": container with ID starting with df1017d1d25186d54915df38d727e1ac5b0bb0fc2e17f35adba38f47d494a11b not found: ID does not exist" Dec 01 06:57:39 crc kubenswrapper[4632]: I1201 06:57:39.154695 4632 scope.go:117] "RemoveContainer" containerID="7a3daf33bdd92c7517e14c74c059cfa4a9163b6b1fb18da8874b00dd4296259a" Dec 01 06:57:39 crc kubenswrapper[4632]: E1201 06:57:39.154939 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a3daf33bdd92c7517e14c74c059cfa4a9163b6b1fb18da8874b00dd4296259a\": container with ID starting with 7a3daf33bdd92c7517e14c74c059cfa4a9163b6b1fb18da8874b00dd4296259a not found: ID does not exist" containerID="7a3daf33bdd92c7517e14c74c059cfa4a9163b6b1fb18da8874b00dd4296259a" Dec 01 06:57:39 crc kubenswrapper[4632]: I1201 06:57:39.155011 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a3daf33bdd92c7517e14c74c059cfa4a9163b6b1fb18da8874b00dd4296259a"} err="failed to get container status \"7a3daf33bdd92c7517e14c74c059cfa4a9163b6b1fb18da8874b00dd4296259a\": rpc error: code = NotFound desc = could not find container \"7a3daf33bdd92c7517e14c74c059cfa4a9163b6b1fb18da8874b00dd4296259a\": container with ID starting with 7a3daf33bdd92c7517e14c74c059cfa4a9163b6b1fb18da8874b00dd4296259a not found: ID does not exist" Dec 01 06:57:39 crc kubenswrapper[4632]: I1201 06:57:39.155028 4632 scope.go:117] "RemoveContainer" containerID="f1ec582db171720c35aa92edf7e83e01f5cde4f3d3262f8f86f41bda1289a94f" Dec 01 06:57:39 crc kubenswrapper[4632]: E1201 06:57:39.155215 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1ec582db171720c35aa92edf7e83e01f5cde4f3d3262f8f86f41bda1289a94f\": container with ID starting with f1ec582db171720c35aa92edf7e83e01f5cde4f3d3262f8f86f41bda1289a94f not found: ID does not exist" containerID="f1ec582db171720c35aa92edf7e83e01f5cde4f3d3262f8f86f41bda1289a94f" Dec 01 06:57:39 crc kubenswrapper[4632]: I1201 06:57:39.155238 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1ec582db171720c35aa92edf7e83e01f5cde4f3d3262f8f86f41bda1289a94f"} err="failed to get container status \"f1ec582db171720c35aa92edf7e83e01f5cde4f3d3262f8f86f41bda1289a94f\": rpc error: code = NotFound desc = could not find container \"f1ec582db171720c35aa92edf7e83e01f5cde4f3d3262f8f86f41bda1289a94f\": container with ID starting with f1ec582db171720c35aa92edf7e83e01f5cde4f3d3262f8f86f41bda1289a94f not found: ID does not exist" Dec 01 06:57:39 crc kubenswrapper[4632]: I1201 06:57:39.286187 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-pgkg7" Dec 01 06:57:39 crc kubenswrapper[4632]: I1201 06:57:39.374207 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57240989-7936-48f2-9686-ef72e5dfa1a5-combined-ca-bundle\") pod \"57240989-7936-48f2-9686-ef72e5dfa1a5\" (UID: \"57240989-7936-48f2-9686-ef72e5dfa1a5\") " Dec 01 06:57:39 crc kubenswrapper[4632]: I1201 06:57:39.374334 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57240989-7936-48f2-9686-ef72e5dfa1a5-config-data\") pod \"57240989-7936-48f2-9686-ef72e5dfa1a5\" (UID: \"57240989-7936-48f2-9686-ef72e5dfa1a5\") " Dec 01 06:57:39 crc kubenswrapper[4632]: I1201 06:57:39.374546 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wckzj\" (UniqueName: \"kubernetes.io/projected/57240989-7936-48f2-9686-ef72e5dfa1a5-kube-api-access-wckzj\") pod \"57240989-7936-48f2-9686-ef72e5dfa1a5\" (UID: \"57240989-7936-48f2-9686-ef72e5dfa1a5\") " Dec 01 06:57:39 crc kubenswrapper[4632]: I1201 06:57:39.380141 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57240989-7936-48f2-9686-ef72e5dfa1a5-kube-api-access-wckzj" (OuterVolumeSpecName: "kube-api-access-wckzj") pod "57240989-7936-48f2-9686-ef72e5dfa1a5" (UID: "57240989-7936-48f2-9686-ef72e5dfa1a5"). InnerVolumeSpecName "kube-api-access-wckzj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:57:39 crc kubenswrapper[4632]: I1201 06:57:39.398338 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57240989-7936-48f2-9686-ef72e5dfa1a5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "57240989-7936-48f2-9686-ef72e5dfa1a5" (UID: "57240989-7936-48f2-9686-ef72e5dfa1a5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:57:39 crc kubenswrapper[4632]: I1201 06:57:39.413746 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57240989-7936-48f2-9686-ef72e5dfa1a5-config-data" (OuterVolumeSpecName: "config-data") pod "57240989-7936-48f2-9686-ef72e5dfa1a5" (UID: "57240989-7936-48f2-9686-ef72e5dfa1a5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:57:39 crc kubenswrapper[4632]: I1201 06:57:39.477596 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57240989-7936-48f2-9686-ef72e5dfa1a5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:39 crc kubenswrapper[4632]: I1201 06:57:39.477644 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57240989-7936-48f2-9686-ef72e5dfa1a5-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:39 crc kubenswrapper[4632]: I1201 06:57:39.477655 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wckzj\" (UniqueName: \"kubernetes.io/projected/57240989-7936-48f2-9686-ef72e5dfa1a5-kube-api-access-wckzj\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.078785 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-pgkg7" event={"ID":"57240989-7936-48f2-9686-ef72e5dfa1a5","Type":"ContainerDied","Data":"ac857bc32ba0fc72a9628704f5b75e0c4b3551e6123be73c4a410a7e59741f6c"} Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.078830 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-pgkg7" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.078843 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ac857bc32ba0fc72a9628704f5b75e0c4b3551e6123be73c4a410a7e59741f6c" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.312507 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-76985944d7-fsstq"] Dec 01 06:57:40 crc kubenswrapper[4632]: E1201 06:57:40.313119 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd88df45-3cae-489b-8811-23ca534c8b04" containerName="extract-content" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.313139 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd88df45-3cae-489b-8811-23ca534c8b04" containerName="extract-content" Dec 01 06:57:40 crc kubenswrapper[4632]: E1201 06:57:40.313161 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd88df45-3cae-489b-8811-23ca534c8b04" containerName="registry-server" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.313168 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd88df45-3cae-489b-8811-23ca534c8b04" containerName="registry-server" Dec 01 06:57:40 crc kubenswrapper[4632]: E1201 06:57:40.313180 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd88df45-3cae-489b-8811-23ca534c8b04" containerName="extract-utilities" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.313189 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd88df45-3cae-489b-8811-23ca534c8b04" containerName="extract-utilities" Dec 01 06:57:40 crc kubenswrapper[4632]: E1201 06:57:40.313218 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57240989-7936-48f2-9686-ef72e5dfa1a5" containerName="keystone-db-sync" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.313224 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="57240989-7936-48f2-9686-ef72e5dfa1a5" containerName="keystone-db-sync" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.313423 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="57240989-7936-48f2-9686-ef72e5dfa1a5" containerName="keystone-db-sync" Dec 01 06:57:40 crc 
kubenswrapper[4632]: I1201 06:57:40.313441 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd88df45-3cae-489b-8811-23ca534c8b04" containerName="registry-server" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.314237 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76985944d7-fsstq" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.325778 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76985944d7-fsstq"] Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.411869 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-7ft7x"] Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.423436 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7ft7x" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.427131 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-ll652" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.427233 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.427323 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.427424 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.403342 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-ovsdbserver-nb\") pod \"dnsmasq-dns-76985944d7-fsstq\" (UID: \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\") " pod="openstack/dnsmasq-dns-76985944d7-fsstq" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.427644 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q684b\" (UniqueName: \"kubernetes.io/projected/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-kube-api-access-q684b\") pod \"dnsmasq-dns-76985944d7-fsstq\" (UID: \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\") " pod="openstack/dnsmasq-dns-76985944d7-fsstq" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.427697 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-dns-svc\") pod \"dnsmasq-dns-76985944d7-fsstq\" (UID: \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\") " pod="openstack/dnsmasq-dns-76985944d7-fsstq" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.427760 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-ovsdbserver-sb\") pod \"dnsmasq-dns-76985944d7-fsstq\" (UID: \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\") " pod="openstack/dnsmasq-dns-76985944d7-fsstq" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.427902 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-config\") pod \"dnsmasq-dns-76985944d7-fsstq\" (UID: \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\") " pod="openstack/dnsmasq-dns-76985944d7-fsstq" Dec 
01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.428139 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-dns-swift-storage-0\") pod \"dnsmasq-dns-76985944d7-fsstq\" (UID: \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\") " pod="openstack/dnsmasq-dns-76985944d7-fsstq" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.427655 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.439016 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-7ft7x"] Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.530247 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-fernet-keys\") pod \"keystone-bootstrap-7ft7x\" (UID: \"1a1233c4-8d6c-46da-97f0-915f50fb429c\") " pod="openstack/keystone-bootstrap-7ft7x" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.530314 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-credential-keys\") pod \"keystone-bootstrap-7ft7x\" (UID: \"1a1233c4-8d6c-46da-97f0-915f50fb429c\") " pod="openstack/keystone-bootstrap-7ft7x" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.530535 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-scripts\") pod \"keystone-bootstrap-7ft7x\" (UID: \"1a1233c4-8d6c-46da-97f0-915f50fb429c\") " pod="openstack/keystone-bootstrap-7ft7x" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.530599 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-config-data\") pod \"keystone-bootstrap-7ft7x\" (UID: \"1a1233c4-8d6c-46da-97f0-915f50fb429c\") " pod="openstack/keystone-bootstrap-7ft7x" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.530629 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-dns-swift-storage-0\") pod \"dnsmasq-dns-76985944d7-fsstq\" (UID: \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\") " pod="openstack/dnsmasq-dns-76985944d7-fsstq" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.530678 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-ovsdbserver-nb\") pod \"dnsmasq-dns-76985944d7-fsstq\" (UID: \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\") " pod="openstack/dnsmasq-dns-76985944d7-fsstq" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.530702 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q684b\" (UniqueName: \"kubernetes.io/projected/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-kube-api-access-q684b\") pod \"dnsmasq-dns-76985944d7-fsstq\" (UID: \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\") " pod="openstack/dnsmasq-dns-76985944d7-fsstq" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.530723 4632 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-dns-svc\") pod \"dnsmasq-dns-76985944d7-fsstq\" (UID: \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\") " pod="openstack/dnsmasq-dns-76985944d7-fsstq" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.530743 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-ovsdbserver-sb\") pod \"dnsmasq-dns-76985944d7-fsstq\" (UID: \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\") " pod="openstack/dnsmasq-dns-76985944d7-fsstq" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.530794 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xn5m5\" (UniqueName: \"kubernetes.io/projected/1a1233c4-8d6c-46da-97f0-915f50fb429c-kube-api-access-xn5m5\") pod \"keystone-bootstrap-7ft7x\" (UID: \"1a1233c4-8d6c-46da-97f0-915f50fb429c\") " pod="openstack/keystone-bootstrap-7ft7x" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.530823 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-combined-ca-bundle\") pod \"keystone-bootstrap-7ft7x\" (UID: \"1a1233c4-8d6c-46da-97f0-915f50fb429c\") " pod="openstack/keystone-bootstrap-7ft7x" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.530842 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-config\") pod \"dnsmasq-dns-76985944d7-fsstq\" (UID: \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\") " pod="openstack/dnsmasq-dns-76985944d7-fsstq" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.531730 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-config\") pod \"dnsmasq-dns-76985944d7-fsstq\" (UID: \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\") " pod="openstack/dnsmasq-dns-76985944d7-fsstq" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.532304 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-dns-swift-storage-0\") pod \"dnsmasq-dns-76985944d7-fsstq\" (UID: \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\") " pod="openstack/dnsmasq-dns-76985944d7-fsstq" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.532811 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-dns-svc\") pod \"dnsmasq-dns-76985944d7-fsstq\" (UID: \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\") " pod="openstack/dnsmasq-dns-76985944d7-fsstq" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.532879 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-ovsdbserver-sb\") pod \"dnsmasq-dns-76985944d7-fsstq\" (UID: \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\") " pod="openstack/dnsmasq-dns-76985944d7-fsstq" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.533288 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" 
(UniqueName: \"kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-ovsdbserver-nb\") pod \"dnsmasq-dns-76985944d7-fsstq\" (UID: \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\") " pod="openstack/dnsmasq-dns-76985944d7-fsstq" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.557959 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.559189 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q684b\" (UniqueName: \"kubernetes.io/projected/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-kube-api-access-q684b\") pod \"dnsmasq-dns-76985944d7-fsstq\" (UID: \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\") " pod="openstack/dnsmasq-dns-76985944d7-fsstq" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.560518 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.564525 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.564803 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.589730 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-4rvbb"] Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.590985 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-4rvbb" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.597098 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.597477 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.597620 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-fbrw7" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.604281 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-7q66w"] Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.606426 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-7q66w" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.609718 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.609952 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-hz9v6" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.609962 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.614425 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.634794 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08137779-8c6e-49ad-b911-7d4a952d5391-run-httpd\") pod \"ceilometer-0\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " pod="openstack/ceilometer-0" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.634963 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08137779-8c6e-49ad-b911-7d4a952d5391-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " pod="openstack/ceilometer-0" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.635073 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xn5m5\" (UniqueName: \"kubernetes.io/projected/1a1233c4-8d6c-46da-97f0-915f50fb429c-kube-api-access-xn5m5\") pod \"keystone-bootstrap-7ft7x\" (UID: \"1a1233c4-8d6c-46da-97f0-915f50fb429c\") " pod="openstack/keystone-bootstrap-7ft7x" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.635142 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08137779-8c6e-49ad-b911-7d4a952d5391-log-httpd\") pod \"ceilometer-0\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " pod="openstack/ceilometer-0" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.635241 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-combined-ca-bundle\") pod \"keystone-bootstrap-7ft7x\" (UID: \"1a1233c4-8d6c-46da-97f0-915f50fb429c\") " pod="openstack/keystone-bootstrap-7ft7x" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.635337 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-fernet-keys\") pod \"keystone-bootstrap-7ft7x\" (UID: \"1a1233c4-8d6c-46da-97f0-915f50fb429c\") " pod="openstack/keystone-bootstrap-7ft7x" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.635580 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08137779-8c6e-49ad-b911-7d4a952d5391-config-data\") pod \"ceilometer-0\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " pod="openstack/ceilometer-0" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.635883 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-credential-keys\") pod \"keystone-bootstrap-7ft7x\" (UID: \"1a1233c4-8d6c-46da-97f0-915f50fb429c\") " pod="openstack/keystone-bootstrap-7ft7x" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.635960 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08137779-8c6e-49ad-b911-7d4a952d5391-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " pod="openstack/ceilometer-0" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.636047 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-scripts\") pod \"keystone-bootstrap-7ft7x\" (UID: \"1a1233c4-8d6c-46da-97f0-915f50fb429c\") " pod="openstack/keystone-bootstrap-7ft7x" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.636199 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-config-data\") pod \"keystone-bootstrap-7ft7x\" (UID: \"1a1233c4-8d6c-46da-97f0-915f50fb429c\") " pod="openstack/keystone-bootstrap-7ft7x" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.636273 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cz5vl\" (UniqueName: \"kubernetes.io/projected/08137779-8c6e-49ad-b911-7d4a952d5391-kube-api-access-cz5vl\") pod \"ceilometer-0\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " pod="openstack/ceilometer-0" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.636404 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08137779-8c6e-49ad-b911-7d4a952d5391-scripts\") pod \"ceilometer-0\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " pod="openstack/ceilometer-0" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.647484 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-fernet-keys\") pod \"keystone-bootstrap-7ft7x\" (UID: \"1a1233c4-8d6c-46da-97f0-915f50fb429c\") " pod="openstack/keystone-bootstrap-7ft7x" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.655749 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-scripts\") pod \"keystone-bootstrap-7ft7x\" (UID: \"1a1233c4-8d6c-46da-97f0-915f50fb429c\") " pod="openstack/keystone-bootstrap-7ft7x" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.658333 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-config-data\") pod \"keystone-bootstrap-7ft7x\" (UID: \"1a1233c4-8d6c-46da-97f0-915f50fb429c\") " pod="openstack/keystone-bootstrap-7ft7x" Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.659271 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xn5m5\" (UniqueName: \"kubernetes.io/projected/1a1233c4-8d6c-46da-97f0-915f50fb429c-kube-api-access-xn5m5\") pod \"keystone-bootstrap-7ft7x\" (UID: \"1a1233c4-8d6c-46da-97f0-915f50fb429c\") " pod="openstack/keystone-bootstrap-7ft7x" Dec 01 06:57:40 
crc kubenswrapper[4632]: I1201 06:57:40.662714 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-7q66w"]
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.663576 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-credential-keys\") pod \"keystone-bootstrap-7ft7x\" (UID: \"1a1233c4-8d6c-46da-97f0-915f50fb429c\") " pod="openstack/keystone-bootstrap-7ft7x"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.664061 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76985944d7-fsstq"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.665134 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-combined-ca-bundle\") pod \"keystone-bootstrap-7ft7x\" (UID: \"1a1233c4-8d6c-46da-97f0-915f50fb429c\") " pod="openstack/keystone-bootstrap-7ft7x"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.678580 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-4rvbb"]
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.696551 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-xvgfc"]
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.707652 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-xvgfc"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.710430 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-xvgfc"]
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.712823 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.713038 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-jh9ph"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.735368 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76985944d7-fsstq"]
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.738519 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08137779-8c6e-49ad-b911-7d4a952d5391-run-httpd\") pod \"ceilometer-0\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " pod="openstack/ceilometer-0"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.738568 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08137779-8c6e-49ad-b911-7d4a952d5391-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " pod="openstack/ceilometer-0"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.738598 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08137779-8c6e-49ad-b911-7d4a952d5391-log-httpd\") pod \"ceilometer-0\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " pod="openstack/ceilometer-0"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.738680 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08137779-8c6e-49ad-b911-7d4a952d5391-config-data\") pod \"ceilometer-0\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " pod="openstack/ceilometer-0"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.738705 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08137779-8c6e-49ad-b911-7d4a952d5391-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " pod="openstack/ceilometer-0"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.738742 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6mxl\" (UniqueName: \"kubernetes.io/projected/6b176a21-27cf-4608-8787-e91a914be7cb-kube-api-access-z6mxl\") pod \"cinder-db-sync-7q66w\" (UID: \"6b176a21-27cf-4608-8787-e91a914be7cb\") " pod="openstack/cinder-db-sync-7q66w"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.738773 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b176a21-27cf-4608-8787-e91a914be7cb-combined-ca-bundle\") pod \"cinder-db-sync-7q66w\" (UID: \"6b176a21-27cf-4608-8787-e91a914be7cb\") " pod="openstack/cinder-db-sync-7q66w"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.738794 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b176a21-27cf-4608-8787-e91a914be7cb-scripts\") pod \"cinder-db-sync-7q66w\" (UID: \"6b176a21-27cf-4608-8787-e91a914be7cb\") " pod="openstack/cinder-db-sync-7q66w"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.738819 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wg5f\" (UniqueName: \"kubernetes.io/projected/57384d37-fe8e-4534-b99f-579737abcab7-kube-api-access-6wg5f\") pod \"neutron-db-sync-4rvbb\" (UID: \"57384d37-fe8e-4534-b99f-579737abcab7\") " pod="openstack/neutron-db-sync-4rvbb"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.738845 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cz5vl\" (UniqueName: \"kubernetes.io/projected/08137779-8c6e-49ad-b911-7d4a952d5391-kube-api-access-cz5vl\") pod \"ceilometer-0\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " pod="openstack/ceilometer-0"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.743705 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08137779-8c6e-49ad-b911-7d4a952d5391-run-httpd\") pod \"ceilometer-0\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " pod="openstack/ceilometer-0"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.748087 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6b176a21-27cf-4608-8787-e91a914be7cb-db-sync-config-data\") pod \"cinder-db-sync-7q66w\" (UID: \"6b176a21-27cf-4608-8787-e91a914be7cb\") " pod="openstack/cinder-db-sync-7q66w"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.748733 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08137779-8c6e-49ad-b911-7d4a952d5391-log-httpd\") pod \"ceilometer-0\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " pod="openstack/ceilometer-0"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.748765 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-76cff69bf5-b4scl"]
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.749561 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7ft7x"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.754594 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08137779-8c6e-49ad-b911-7d4a952d5391-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " pod="openstack/ceilometer-0"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.761198 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08137779-8c6e-49ad-b911-7d4a952d5391-scripts\") pod \"ceilometer-0\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " pod="openstack/ceilometer-0"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.761368 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76cff69bf5-b4scl"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.761555 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08137779-8c6e-49ad-b911-7d4a952d5391-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " pod="openstack/ceilometer-0"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.754677 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08137779-8c6e-49ad-b911-7d4a952d5391-scripts\") pod \"ceilometer-0\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " pod="openstack/ceilometer-0"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.768341 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/57384d37-fe8e-4534-b99f-579737abcab7-config\") pod \"neutron-db-sync-4rvbb\" (UID: \"57384d37-fe8e-4534-b99f-579737abcab7\") " pod="openstack/neutron-db-sync-4rvbb"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.768495 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57384d37-fe8e-4534-b99f-579737abcab7-combined-ca-bundle\") pod \"neutron-db-sync-4rvbb\" (UID: \"57384d37-fe8e-4534-b99f-579737abcab7\") " pod="openstack/neutron-db-sync-4rvbb"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.768667 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b176a21-27cf-4608-8787-e91a914be7cb-config-data\") pod \"cinder-db-sync-7q66w\" (UID: \"6b176a21-27cf-4608-8787-e91a914be7cb\") " pod="openstack/cinder-db-sync-7q66w"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.768823 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6b176a21-27cf-4608-8787-e91a914be7cb-etc-machine-id\") pod \"cinder-db-sync-7q66w\" (UID: \"6b176a21-27cf-4608-8787-e91a914be7cb\") " pod="openstack/cinder-db-sync-7q66w"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.771307 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08137779-8c6e-49ad-b911-7d4a952d5391-config-data\") pod \"ceilometer-0\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " pod="openstack/ceilometer-0"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.783676 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cz5vl\" (UniqueName: \"kubernetes.io/projected/08137779-8c6e-49ad-b911-7d4a952d5391-kube-api-access-cz5vl\") pod \"ceilometer-0\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " pod="openstack/ceilometer-0"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.788443 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd88df45-3cae-489b-8811-23ca534c8b04" path="/var/lib/kubelet/pods/cd88df45-3cae-489b-8811-23ca534c8b04/volumes"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.789301 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-bqchp"]
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.790640 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-bqchp"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.793331 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.793651 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.795587 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76cff69bf5-b4scl"]
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.795638 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-bqchp"]
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.803114 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-672zc"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.870719 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-585br\" (UniqueName: \"kubernetes.io/projected/803c9fb6-6650-4865-b7f1-ed485299302f-kube-api-access-585br\") pod \"barbican-db-sync-xvgfc\" (UID: \"803c9fb6-6650-4865-b7f1-ed485299302f\") " pod="openstack/barbican-db-sync-xvgfc"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.870818 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6mxl\" (UniqueName: \"kubernetes.io/projected/6b176a21-27cf-4608-8787-e91a914be7cb-kube-api-access-z6mxl\") pod \"cinder-db-sync-7q66w\" (UID: \"6b176a21-27cf-4608-8787-e91a914be7cb\") " pod="openstack/cinder-db-sync-7q66w"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.870854 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-ovsdbserver-sb\") pod \"dnsmasq-dns-76cff69bf5-b4scl\" (UID: \"42ac38c3-780c-44b4-9d50-aa59cdf15703\") " pod="openstack/dnsmasq-dns-76cff69bf5-b4scl"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.870875 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b176a21-27cf-4608-8787-e91a914be7cb-combined-ca-bundle\") pod \"cinder-db-sync-7q66w\" (UID: \"6b176a21-27cf-4608-8787-e91a914be7cb\") " pod="openstack/cinder-db-sync-7q66w"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.870906 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b176a21-27cf-4608-8787-e91a914be7cb-scripts\") pod \"cinder-db-sync-7q66w\" (UID: \"6b176a21-27cf-4608-8787-e91a914be7cb\") " pod="openstack/cinder-db-sync-7q66w"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.870923 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-dns-svc\") pod \"dnsmasq-dns-76cff69bf5-b4scl\" (UID: \"42ac38c3-780c-44b4-9d50-aa59cdf15703\") " pod="openstack/dnsmasq-dns-76cff69bf5-b4scl"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.870947 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wg5f\" (UniqueName: \"kubernetes.io/projected/57384d37-fe8e-4534-b99f-579737abcab7-kube-api-access-6wg5f\") pod \"neutron-db-sync-4rvbb\" (UID: \"57384d37-fe8e-4534-b99f-579737abcab7\") " pod="openstack/neutron-db-sync-4rvbb"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.870978 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-dns-swift-storage-0\") pod \"dnsmasq-dns-76cff69bf5-b4scl\" (UID: \"42ac38c3-780c-44b4-9d50-aa59cdf15703\") " pod="openstack/dnsmasq-dns-76cff69bf5-b4scl"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.871988 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6b176a21-27cf-4608-8787-e91a914be7cb-db-sync-config-data\") pod \"cinder-db-sync-7q66w\" (UID: \"6b176a21-27cf-4608-8787-e91a914be7cb\") " pod="openstack/cinder-db-sync-7q66w"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.872040 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsg78\" (UniqueName: \"kubernetes.io/projected/42ac38c3-780c-44b4-9d50-aa59cdf15703-kube-api-access-zsg78\") pod \"dnsmasq-dns-76cff69bf5-b4scl\" (UID: \"42ac38c3-780c-44b4-9d50-aa59cdf15703\") " pod="openstack/dnsmasq-dns-76cff69bf5-b4scl"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.872099 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/57384d37-fe8e-4534-b99f-579737abcab7-config\") pod \"neutron-db-sync-4rvbb\" (UID: \"57384d37-fe8e-4534-b99f-579737abcab7\") " pod="openstack/neutron-db-sync-4rvbb"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.872115 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57384d37-fe8e-4534-b99f-579737abcab7-combined-ca-bundle\") pod \"neutron-db-sync-4rvbb\" (UID: \"57384d37-fe8e-4534-b99f-579737abcab7\") " pod="openstack/neutron-db-sync-4rvbb"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.872171 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-ovsdbserver-nb\") pod \"dnsmasq-dns-76cff69bf5-b4scl\" (UID: \"42ac38c3-780c-44b4-9d50-aa59cdf15703\") " pod="openstack/dnsmasq-dns-76cff69bf5-b4scl"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.872206 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b176a21-27cf-4608-8787-e91a914be7cb-config-data\") pod \"cinder-db-sync-7q66w\" (UID: \"6b176a21-27cf-4608-8787-e91a914be7cb\") " pod="openstack/cinder-db-sync-7q66w"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.872264 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6b176a21-27cf-4608-8787-e91a914be7cb-etc-machine-id\") pod \"cinder-db-sync-7q66w\" (UID: \"6b176a21-27cf-4608-8787-e91a914be7cb\") " pod="openstack/cinder-db-sync-7q66w"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.872304 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/803c9fb6-6650-4865-b7f1-ed485299302f-db-sync-config-data\") pod \"barbican-db-sync-xvgfc\" (UID: \"803c9fb6-6650-4865-b7f1-ed485299302f\") " pod="openstack/barbican-db-sync-xvgfc"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.872324 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/803c9fb6-6650-4865-b7f1-ed485299302f-combined-ca-bundle\") pod \"barbican-db-sync-xvgfc\" (UID: \"803c9fb6-6650-4865-b7f1-ed485299302f\") " pod="openstack/barbican-db-sync-xvgfc"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.872345 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-config\") pod \"dnsmasq-dns-76cff69bf5-b4scl\" (UID: \"42ac38c3-780c-44b4-9d50-aa59cdf15703\") " pod="openstack/dnsmasq-dns-76cff69bf5-b4scl"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.873327 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6b176a21-27cf-4608-8787-e91a914be7cb-etc-machine-id\") pod \"cinder-db-sync-7q66w\" (UID: \"6b176a21-27cf-4608-8787-e91a914be7cb\") " pod="openstack/cinder-db-sync-7q66w"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.877122 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b176a21-27cf-4608-8787-e91a914be7cb-scripts\") pod \"cinder-db-sync-7q66w\" (UID: \"6b176a21-27cf-4608-8787-e91a914be7cb\") " pod="openstack/cinder-db-sync-7q66w"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.878960 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b176a21-27cf-4608-8787-e91a914be7cb-config-data\") pod \"cinder-db-sync-7q66w\" (UID: \"6b176a21-27cf-4608-8787-e91a914be7cb\") " pod="openstack/cinder-db-sync-7q66w"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.880657 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6b176a21-27cf-4608-8787-e91a914be7cb-db-sync-config-data\") pod \"cinder-db-sync-7q66w\" (UID: \"6b176a21-27cf-4608-8787-e91a914be7cb\") " pod="openstack/cinder-db-sync-7q66w"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.882784 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57384d37-fe8e-4534-b99f-579737abcab7-combined-ca-bundle\") pod \"neutron-db-sync-4rvbb\" (UID: \"57384d37-fe8e-4534-b99f-579737abcab7\") " pod="openstack/neutron-db-sync-4rvbb"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.884062 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/57384d37-fe8e-4534-b99f-579737abcab7-config\") pod \"neutron-db-sync-4rvbb\" (UID: \"57384d37-fe8e-4534-b99f-579737abcab7\") " pod="openstack/neutron-db-sync-4rvbb"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.884817 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b176a21-27cf-4608-8787-e91a914be7cb-combined-ca-bundle\") pod \"cinder-db-sync-7q66w\" (UID: \"6b176a21-27cf-4608-8787-e91a914be7cb\") " pod="openstack/cinder-db-sync-7q66w"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.887220 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6mxl\" (UniqueName: \"kubernetes.io/projected/6b176a21-27cf-4608-8787-e91a914be7cb-kube-api-access-z6mxl\") pod \"cinder-db-sync-7q66w\" (UID: \"6b176a21-27cf-4608-8787-e91a914be7cb\") " pod="openstack/cinder-db-sync-7q66w"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.893584 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wg5f\" (UniqueName: \"kubernetes.io/projected/57384d37-fe8e-4534-b99f-579737abcab7-kube-api-access-6wg5f\") pod \"neutron-db-sync-4rvbb\" (UID: \"57384d37-fe8e-4534-b99f-579737abcab7\") " pod="openstack/neutron-db-sync-4rvbb"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.914997 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.926932 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-4rvbb"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.974688 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17a28608-4b5e-435b-868f-0c9cc98f7c91-scripts\") pod \"placement-db-sync-bqchp\" (UID: \"17a28608-4b5e-435b-868f-0c9cc98f7c91\") " pod="openstack/placement-db-sync-bqchp"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.974796 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-585br\" (UniqueName: \"kubernetes.io/projected/803c9fb6-6650-4865-b7f1-ed485299302f-kube-api-access-585br\") pod \"barbican-db-sync-xvgfc\" (UID: \"803c9fb6-6650-4865-b7f1-ed485299302f\") " pod="openstack/barbican-db-sync-xvgfc"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.974861 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17a28608-4b5e-435b-868f-0c9cc98f7c91-combined-ca-bundle\") pod \"placement-db-sync-bqchp\" (UID: \"17a28608-4b5e-435b-868f-0c9cc98f7c91\") " pod="openstack/placement-db-sync-bqchp"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.974912 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17a28608-4b5e-435b-868f-0c9cc98f7c91-config-data\") pod \"placement-db-sync-bqchp\" (UID: \"17a28608-4b5e-435b-868f-0c9cc98f7c91\") " pod="openstack/placement-db-sync-bqchp"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.974993 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-ovsdbserver-sb\") pod \"dnsmasq-dns-76cff69bf5-b4scl\" (UID: \"42ac38c3-780c-44b4-9d50-aa59cdf15703\") " pod="openstack/dnsmasq-dns-76cff69bf5-b4scl"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.975030 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-dns-svc\") pod \"dnsmasq-dns-76cff69bf5-b4scl\" (UID: \"42ac38c3-780c-44b4-9d50-aa59cdf15703\") " pod="openstack/dnsmasq-dns-76cff69bf5-b4scl"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.975080 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-dns-swift-storage-0\") pod \"dnsmasq-dns-76cff69bf5-b4scl\" (UID: \"42ac38c3-780c-44b4-9d50-aa59cdf15703\") " pod="openstack/dnsmasq-dns-76cff69bf5-b4scl"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.975134 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftc9q\" (UniqueName: \"kubernetes.io/projected/17a28608-4b5e-435b-868f-0c9cc98f7c91-kube-api-access-ftc9q\") pod \"placement-db-sync-bqchp\" (UID: \"17a28608-4b5e-435b-868f-0c9cc98f7c91\") " pod="openstack/placement-db-sync-bqchp"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.975157 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsg78\" (UniqueName: \"kubernetes.io/projected/42ac38c3-780c-44b4-9d50-aa59cdf15703-kube-api-access-zsg78\") pod \"dnsmasq-dns-76cff69bf5-b4scl\" (UID: \"42ac38c3-780c-44b4-9d50-aa59cdf15703\") " pod="openstack/dnsmasq-dns-76cff69bf5-b4scl"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.975194 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-ovsdbserver-nb\") pod \"dnsmasq-dns-76cff69bf5-b4scl\" (UID: \"42ac38c3-780c-44b4-9d50-aa59cdf15703\") " pod="openstack/dnsmasq-dns-76cff69bf5-b4scl"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.975251 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/803c9fb6-6650-4865-b7f1-ed485299302f-db-sync-config-data\") pod \"barbican-db-sync-xvgfc\" (UID: \"803c9fb6-6650-4865-b7f1-ed485299302f\") " pod="openstack/barbican-db-sync-xvgfc"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.975268 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/803c9fb6-6650-4865-b7f1-ed485299302f-combined-ca-bundle\") pod \"barbican-db-sync-xvgfc\" (UID: \"803c9fb6-6650-4865-b7f1-ed485299302f\") " pod="openstack/barbican-db-sync-xvgfc"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.975302 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-config\") pod \"dnsmasq-dns-76cff69bf5-b4scl\" (UID: \"42ac38c3-780c-44b4-9d50-aa59cdf15703\") " pod="openstack/dnsmasq-dns-76cff69bf5-b4scl"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.975334 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17a28608-4b5e-435b-868f-0c9cc98f7c91-logs\") pod \"placement-db-sync-bqchp\" (UID: \"17a28608-4b5e-435b-868f-0c9cc98f7c91\") " pod="openstack/placement-db-sync-bqchp"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.976144 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-ovsdbserver-sb\") pod \"dnsmasq-dns-76cff69bf5-b4scl\" (UID: \"42ac38c3-780c-44b4-9d50-aa59cdf15703\") " pod="openstack/dnsmasq-dns-76cff69bf5-b4scl"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.976748 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-ovsdbserver-nb\") pod \"dnsmasq-dns-76cff69bf5-b4scl\" (UID: \"42ac38c3-780c-44b4-9d50-aa59cdf15703\") " pod="openstack/dnsmasq-dns-76cff69bf5-b4scl"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.976856 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-config\") pod \"dnsmasq-dns-76cff69bf5-b4scl\" (UID: \"42ac38c3-780c-44b4-9d50-aa59cdf15703\") " pod="openstack/dnsmasq-dns-76cff69bf5-b4scl"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.976915 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-dns-svc\") pod \"dnsmasq-dns-76cff69bf5-b4scl\" (UID: \"42ac38c3-780c-44b4-9d50-aa59cdf15703\") " pod="openstack/dnsmasq-dns-76cff69bf5-b4scl"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.982782 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/803c9fb6-6650-4865-b7f1-ed485299302f-db-sync-config-data\") pod \"barbican-db-sync-xvgfc\" (UID: \"803c9fb6-6650-4865-b7f1-ed485299302f\") " pod="openstack/barbican-db-sync-xvgfc"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.983768 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-dns-swift-storage-0\") pod \"dnsmasq-dns-76cff69bf5-b4scl\" (UID: \"42ac38c3-780c-44b4-9d50-aa59cdf15703\") " pod="openstack/dnsmasq-dns-76cff69bf5-b4scl"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.992960 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zsg78\" (UniqueName: \"kubernetes.io/projected/42ac38c3-780c-44b4-9d50-aa59cdf15703-kube-api-access-zsg78\") pod \"dnsmasq-dns-76cff69bf5-b4scl\" (UID: \"42ac38c3-780c-44b4-9d50-aa59cdf15703\") " pod="openstack/dnsmasq-dns-76cff69bf5-b4scl"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.993508 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-585br\" (UniqueName: \"kubernetes.io/projected/803c9fb6-6650-4865-b7f1-ed485299302f-kube-api-access-585br\") pod \"barbican-db-sync-xvgfc\" (UID: \"803c9fb6-6650-4865-b7f1-ed485299302f\") " pod="openstack/barbican-db-sync-xvgfc"
Dec 01 06:57:40 crc kubenswrapper[4632]: I1201 06:57:40.993553 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/803c9fb6-6650-4865-b7f1-ed485299302f-combined-ca-bundle\") pod \"barbican-db-sync-xvgfc\" (UID: \"803c9fb6-6650-4865-b7f1-ed485299302f\") " pod="openstack/barbican-db-sync-xvgfc"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.066601 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-7q66w"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.077253 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17a28608-4b5e-435b-868f-0c9cc98f7c91-combined-ca-bundle\") pod \"placement-db-sync-bqchp\" (UID: \"17a28608-4b5e-435b-868f-0c9cc98f7c91\") " pod="openstack/placement-db-sync-bqchp"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.077324 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17a28608-4b5e-435b-868f-0c9cc98f7c91-config-data\") pod \"placement-db-sync-bqchp\" (UID: \"17a28608-4b5e-435b-868f-0c9cc98f7c91\") " pod="openstack/placement-db-sync-bqchp"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.077481 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftc9q\" (UniqueName: \"kubernetes.io/projected/17a28608-4b5e-435b-868f-0c9cc98f7c91-kube-api-access-ftc9q\") pod \"placement-db-sync-bqchp\" (UID: \"17a28608-4b5e-435b-868f-0c9cc98f7c91\") " pod="openstack/placement-db-sync-bqchp"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.077641 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17a28608-4b5e-435b-868f-0c9cc98f7c91-logs\") pod \"placement-db-sync-bqchp\" (UID: \"17a28608-4b5e-435b-868f-0c9cc98f7c91\") " pod="openstack/placement-db-sync-bqchp"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.077689 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17a28608-4b5e-435b-868f-0c9cc98f7c91-scripts\") pod \"placement-db-sync-bqchp\" (UID: \"17a28608-4b5e-435b-868f-0c9cc98f7c91\") " pod="openstack/placement-db-sync-bqchp"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.078185 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17a28608-4b5e-435b-868f-0c9cc98f7c91-logs\") pod \"placement-db-sync-bqchp\" (UID: \"17a28608-4b5e-435b-868f-0c9cc98f7c91\") " pod="openstack/placement-db-sync-bqchp"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.086907 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-xvgfc"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.087190 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17a28608-4b5e-435b-868f-0c9cc98f7c91-scripts\") pod \"placement-db-sync-bqchp\" (UID: \"17a28608-4b5e-435b-868f-0c9cc98f7c91\") " pod="openstack/placement-db-sync-bqchp"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.087394 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17a28608-4b5e-435b-868f-0c9cc98f7c91-combined-ca-bundle\") pod \"placement-db-sync-bqchp\" (UID: \"17a28608-4b5e-435b-868f-0c9cc98f7c91\") " pod="openstack/placement-db-sync-bqchp"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.091652 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17a28608-4b5e-435b-868f-0c9cc98f7c91-config-data\") pod \"placement-db-sync-bqchp\" (UID: \"17a28608-4b5e-435b-868f-0c9cc98f7c91\") " pod="openstack/placement-db-sync-bqchp"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.096475 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76cff69bf5-b4scl"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.096527 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftc9q\" (UniqueName: \"kubernetes.io/projected/17a28608-4b5e-435b-868f-0c9cc98f7c91-kube-api-access-ftc9q\") pod \"placement-db-sync-bqchp\" (UID: \"17a28608-4b5e-435b-868f-0c9cc98f7c91\") " pod="openstack/placement-db-sync-bqchp"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.099695 4632 generic.go:334] "Generic (PLEG): container finished" podID="1ce86ad4-7718-4227-a97b-2e9f2dc3bc27" containerID="3184f860516bb772e0587f57f1ea878a37a69bdfd92f36a71c6e79e8fe633995" exitCode=0
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.099753 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dnt67" event={"ID":"1ce86ad4-7718-4227-a97b-2e9f2dc3bc27","Type":"ContainerDied","Data":"3184f860516bb772e0587f57f1ea878a37a69bdfd92f36a71c6e79e8fe633995"}
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.111215 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-bqchp"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.220881 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76985944d7-fsstq"]
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.313307 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-7ft7x"]
Dec 01 06:57:41 crc kubenswrapper[4632]: W1201 06:57:41.325716 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1a1233c4_8d6c_46da_97f0_915f50fb429c.slice/crio-53de5ead63c2af13b7b905a8663afe43bc906c8162671e1b7e628a1cc564768e WatchSource:0}: Error finding container 53de5ead63c2af13b7b905a8663afe43bc906c8162671e1b7e628a1cc564768e: Status 404 returned error can't find the container with id 53de5ead63c2af13b7b905a8663afe43bc906c8162671e1b7e628a1cc564768e
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.457007 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-4rvbb"]
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.469115 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.479179 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.480706 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.483742 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.484764 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.484992 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.485161 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-8f9x2"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.488752 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.529121 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.531593 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.538807 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.539458 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.539480 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.588727 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " pod="openstack/glance-default-external-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.588778 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7f8691a1-4440-42f2-a472-ed03ac4f61b6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " pod="openstack/glance-default-external-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.588825 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f8691a1-4440-42f2-a472-ed03ac4f61b6-scripts\") pod \"glance-default-external-api-0\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " pod="openstack/glance-default-external-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.588858 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7f8691a1-4440-42f2-a472-ed03ac4f61b6-logs\") pod \"glance-default-external-api-0\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " pod="openstack/glance-default-external-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.588945 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f8691a1-4440-42f2-a472-ed03ac4f61b6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " pod="openstack/glance-default-external-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.589012 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7f8691a1-4440-42f2-a472-ed03ac4f61b6-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " pod="openstack/glance-default-external-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.589071 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzclj\" (UniqueName: \"kubernetes.io/projected/7f8691a1-4440-42f2-a472-ed03ac4f61b6-kube-api-access-kzclj\") pod \"glance-default-external-api-0\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " pod="openstack/glance-default-external-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.589096 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f8691a1-4440-42f2-a472-ed03ac4f61b6-config-data\") pod \"glance-default-external-api-0\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " pod="openstack/glance-default-external-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.631230 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-xvgfc"]
Dec 01 06:57:41 crc kubenswrapper[4632]: W1201 06:57:41.634871 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod803c9fb6_6650_4865_b7f1_ed485299302f.slice/crio-72be5536bdc0bbb047e4488cdb60a9a9b51228318e760d073d001861c7eec2da WatchSource:0}: Error finding container 72be5536bdc0bbb047e4488cdb60a9a9b51228318e760d073d001861c7eec2da: Status 404 returned error can't find the container with id 72be5536bdc0bbb047e4488cdb60a9a9b51228318e760d073d001861c7eec2da
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.691696 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f8691a1-4440-42f2-a472-ed03ac4f61b6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " pod="openstack/glance-default-external-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.691798 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7f8691a1-4440-42f2-a472-ed03ac4f61b6-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " pod="openstack/glance-default-external-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.691866 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " pod="openstack/glance-default-internal-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.691891 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f344c97-e869-4a69-b084-bd94330e7e79-logs\") pod \"glance-default-internal-api-0\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " pod="openstack/glance-default-internal-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.691932 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzclj\" (UniqueName: \"kubernetes.io/projected/7f8691a1-4440-42f2-a472-ed03ac4f61b6-kube-api-access-kzclj\") pod \"glance-default-external-api-0\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " pod="openstack/glance-default-external-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.691958 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f8691a1-4440-42f2-a472-ed03ac4f61b6-config-data\") pod \"glance-default-external-api-0\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " pod="openstack/glance-default-external-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.691986 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9f344c97-e869-4a69-b084-bd94330e7e79-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " pod="openstack/glance-default-internal-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.692049 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f344c97-e869-4a69-b084-bd94330e7e79-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " pod="openstack/glance-default-internal-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.692093 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjqx6\" (UniqueName: \"kubernetes.io/projected/9f344c97-e869-4a69-b084-bd94330e7e79-kube-api-access-qjqx6\") pod \"glance-default-internal-api-0\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " pod="openstack/glance-default-internal-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.692168 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " pod="openstack/glance-default-external-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.692190 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f344c97-e869-4a69-b084-bd94330e7e79-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " pod="openstack/glance-default-internal-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.692219 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7f8691a1-4440-42f2-a472-ed03ac4f61b6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " pod="openstack/glance-default-external-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.692246 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f344c97-e869-4a69-b084-bd94330e7e79-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " pod="openstack/glance-default-internal-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.692296 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f8691a1-4440-42f2-a472-ed03ac4f61b6-scripts\") pod \"glance-default-external-api-0\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " pod="openstack/glance-default-external-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.692332 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7f8691a1-4440-42f2-a472-ed03ac4f61b6-logs\") pod \"glance-default-external-api-0\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " pod="openstack/glance-default-external-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.692370 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f344c97-e869-4a69-b084-bd94330e7e79-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " pod="openstack/glance-default-internal-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.694097 4632 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-external-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.694228 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7f8691a1-4440-42f2-a472-ed03ac4f61b6-logs\") pod \"glance-default-external-api-0\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " pod="openstack/glance-default-external-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.696499 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7f8691a1-4440-42f2-a472-ed03ac4f61b6-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " pod="openstack/glance-default-external-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.698552 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f8691a1-4440-42f2-a472-ed03ac4f61b6-config-data\") pod \"glance-default-external-api-0\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " pod="openstack/glance-default-external-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.698749 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f8691a1-4440-42f2-a472-ed03ac4f61b6-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " pod="openstack/glance-default-external-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.698976 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f8691a1-4440-42f2-a472-ed03ac4f61b6-scripts\") pod \"glance-default-external-api-0\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " pod="openstack/glance-default-external-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.699488 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7f8691a1-4440-42f2-a472-ed03ac4f61b6-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " pod="openstack/glance-default-external-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.734706 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzclj\" (UniqueName: \"kubernetes.io/projected/7f8691a1-4440-42f2-a472-ed03ac4f61b6-kube-api-access-kzclj\") pod \"glance-default-external-api-0\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " pod="openstack/glance-default-external-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.741242 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-76cff69bf5-b4scl"]
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.751420 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " pod="openstack/glance-default-external-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.752846 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-7q66w"]
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.771457 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-bqchp"]
Dec 01 06:57:41 crc kubenswrapper[4632]: W1201 06:57:41.784414 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod17a28608_4b5e_435b_868f_0c9cc98f7c91.slice/crio-7afe72922c530897cc63263efeabb66311fc5ca3ce6ae77af144a5c58afd8a9e WatchSource:0}: Error finding container 7afe72922c530897cc63263efeabb66311fc5ca3ce6ae77af144a5c58afd8a9e: Status 404 returned error can't find the container with id 7afe72922c530897cc63263efeabb66311fc5ca3ce6ae77af144a5c58afd8a9e
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.795171 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " pod="openstack/glance-default-internal-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.795213 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f344c97-e869-4a69-b084-bd94330e7e79-logs\") pod \"glance-default-internal-api-0\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " pod="openstack/glance-default-internal-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.795257 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9f344c97-e869-4a69-b084-bd94330e7e79-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " pod="openstack/glance-default-internal-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.795297 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f344c97-e869-4a69-b084-bd94330e7e79-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " pod="openstack/glance-default-internal-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.795319 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjqx6\" (UniqueName: \"kubernetes.io/projected/9f344c97-e869-4a69-b084-bd94330e7e79-kube-api-access-qjqx6\") pod \"glance-default-internal-api-0\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " pod="openstack/glance-default-internal-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.795376 4632 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-internal-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.795486 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f344c97-e869-4a69-b084-bd94330e7e79-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " pod="openstack/glance-default-internal-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.795536 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f344c97-e869-4a69-b084-bd94330e7e79-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " pod="openstack/glance-default-internal-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.795591 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f344c97-e869-4a69-b084-bd94330e7e79-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " pod="openstack/glance-default-internal-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.795703 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9f344c97-e869-4a69-b084-bd94330e7e79-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " pod="openstack/glance-default-internal-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.795747 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f344c97-e869-4a69-b084-bd94330e7e79-logs\") pod \"glance-default-internal-api-0\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " pod="openstack/glance-default-internal-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.798783 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f344c97-e869-4a69-b084-bd94330e7e79-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " pod="openstack/glance-default-internal-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.801036 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f344c97-e869-4a69-b084-bd94330e7e79-scripts\") pod \"glance-default-internal-api-0\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " pod="openstack/glance-default-internal-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.801292 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.803979 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f344c97-e869-4a69-b084-bd94330e7e79-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " pod="openstack/glance-default-internal-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.806746 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f344c97-e869-4a69-b084-bd94330e7e79-config-data\") pod \"glance-default-internal-api-0\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " pod="openstack/glance-default-internal-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.814116 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjqx6\" (UniqueName: \"kubernetes.io/projected/9f344c97-e869-4a69-b084-bd94330e7e79-kube-api-access-qjqx6\") pod \"glance-default-internal-api-0\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " pod="openstack/glance-default-internal-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.825122 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " pod="openstack/glance-default-internal-api-0"
Dec 01 06:57:41 crc kubenswrapper[4632]: I1201 06:57:41.991966 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.118744 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-7q66w" event={"ID":"6b176a21-27cf-4608-8787-e91a914be7cb","Type":"ContainerStarted","Data":"55d1ec05cb7d6154b8e3d83453c6e3c80bbb34ecac0dfdb4976d92c4dffb54a7"}
Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.120401 4632 generic.go:334] "Generic (PLEG): container finished" podID="b2e713bc-63b1-4611-9f78-48a7bc34ddeb" containerID="f6b6adaa2fcb3b003d47989a62174c9392028ace6cc22c04d47b7fdb0c3a8de9" exitCode=0
Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.120508 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76985944d7-fsstq" event={"ID":"b2e713bc-63b1-4611-9f78-48a7bc34ddeb","Type":"ContainerDied","Data":"f6b6adaa2fcb3b003d47989a62174c9392028ace6cc22c04d47b7fdb0c3a8de9"}
Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.120550 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76985944d7-fsstq" event={"ID":"b2e713bc-63b1-4611-9f78-48a7bc34ddeb","Type":"ContainerStarted","Data":"c210ee06609eb7108481c4896a3f4b1c112edf0af6923d78777b4c665cd4a8ed"}
Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.123995 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4rvbb" event={"ID":"57384d37-fe8e-4534-b99f-579737abcab7","Type":"ContainerStarted","Data":"415a7ab3b8973040458a0bc2b77217ebe5a8d21230f0ba79b29cbb2d16521e8c"}
Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.124037 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4rvbb" event={"ID":"57384d37-fe8e-4534-b99f-579737abcab7","Type":"ContainerStarted","Data":"ae35a2561dc4e63547cdd7f0a7012f15339f4f7e5891b73b63086e7e23599505"}
Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.165589 4632 generic.go:334] "Generic (PLEG): container finished" podID="42ac38c3-780c-44b4-9d50-aa59cdf15703" containerID="b78499eaf2d197b54c7bc1972dfd53bb41d68cc3a8c625d97a7c283da05d5ef7" exitCode=0
Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.165686 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76cff69bf5-b4scl" event={"ID":"42ac38c3-780c-44b4-9d50-aa59cdf15703","Type":"ContainerDied","Data":"b78499eaf2d197b54c7bc1972dfd53bb41d68cc3a8c625d97a7c283da05d5ef7"}
Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.165716 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76cff69bf5-b4scl" event={"ID":"42ac38c3-780c-44b4-9d50-aa59cdf15703","Type":"ContainerStarted","Data":"623c7afe76b43ea1a5713c9c42979d2566c1fc6ec700a00bc7228cf8f9ea6271"}
Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.177253 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-4rvbb" podStartSLOduration=2.177232941 podStartE2EDuration="2.177232941s" podCreationTimestamp="2025-12-01 06:57:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:57:42.162715774 +0000 UTC m=+871.727728747" watchObservedRunningTime="2025-12-01 06:57:42.177232941 +0000 UTC m=+871.742245915"
Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.230262 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7ft7x" event={"ID":"1a1233c4-8d6c-46da-97f0-915f50fb429c","Type":"ContainerStarted","Data":"a8228ea7fd5bee8cdc77beba5314e95f8c03d1ba22d248e62b16d61b653888c4"}
Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.230303 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7ft7x" event={"ID":"1a1233c4-8d6c-46da-97f0-915f50fb429c","Type":"ContainerStarted","Data":"53de5ead63c2af13b7b905a8663afe43bc906c8162671e1b7e628a1cc564768e"}
Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.252468 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-bqchp" event={"ID":"17a28608-4b5e-435b-868f-0c9cc98f7c91","Type":"ContainerStarted","Data":"7afe72922c530897cc63263efeabb66311fc5ca3ce6ae77af144a5c58afd8a9e"}
Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.257712 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dnt67" event={"ID":"1ce86ad4-7718-4227-a97b-2e9f2dc3bc27","Type":"ContainerStarted","Data":"84ac35f39972efda5ca7cb8f9eff4d97fc2331c636da36253602a60ae21f60a8"}
Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.304681 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-xvgfc" event={"ID":"803c9fb6-6650-4865-b7f1-ed485299302f","Type":"ContainerStarted","Data":"72be5536bdc0bbb047e4488cdb60a9a9b51228318e760d073d001861c7eec2da"}
Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.320896 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-7ft7x" podStartSLOduration=2.320873998 podStartE2EDuration="2.320873998s" podCreationTimestamp="2025-12-01 06:57:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000
UTC" observedRunningTime="2025-12-01 06:57:42.272930503 +0000 UTC m=+871.837943486" watchObservedRunningTime="2025-12-01 06:57:42.320873998 +0000 UTC m=+871.885886971" Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.322169 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-dnt67" podStartSLOduration=2.628368971 podStartE2EDuration="6.322159574s" podCreationTimestamp="2025-12-01 06:57:36 +0000 UTC" firstStartedPulling="2025-12-01 06:57:38.056792719 +0000 UTC m=+867.621805692" lastFinishedPulling="2025-12-01 06:57:41.750583322 +0000 UTC m=+871.315596295" observedRunningTime="2025-12-01 06:57:42.304979223 +0000 UTC m=+871.869992196" watchObservedRunningTime="2025-12-01 06:57:42.322159574 +0000 UTC m=+871.887172546" Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.322900 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08137779-8c6e-49ad-b911-7d4a952d5391","Type":"ContainerStarted","Data":"da3f70df11c7bfe5be7bfc26b019c4c6e68bd5a27a11bbd5384200bdd1da1849"} Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.522779 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.537104 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.640125 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.712191 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.827065 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-76985944d7-fsstq" Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.962537 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q684b\" (UniqueName: \"kubernetes.io/projected/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-kube-api-access-q684b\") pod \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\" (UID: \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\") " Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.962628 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-ovsdbserver-nb\") pod \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\" (UID: \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\") " Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.962804 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-dns-swift-storage-0\") pod \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\" (UID: \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\") " Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.962829 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-ovsdbserver-sb\") pod \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\" (UID: \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\") " Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.962997 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-config\") pod \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\" (UID: \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\") " Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.963058 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-dns-svc\") pod \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\" (UID: \"b2e713bc-63b1-4611-9f78-48a7bc34ddeb\") " Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.993172 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b2e713bc-63b1-4611-9f78-48a7bc34ddeb" (UID: "b2e713bc-63b1-4611-9f78-48a7bc34ddeb"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:42 crc kubenswrapper[4632]: I1201 06:57:42.994840 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-kube-api-access-q684b" (OuterVolumeSpecName: "kube-api-access-q684b") pod "b2e713bc-63b1-4611-9f78-48a7bc34ddeb" (UID: "b2e713bc-63b1-4611-9f78-48a7bc34ddeb"). InnerVolumeSpecName "kube-api-access-q684b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:57:43 crc kubenswrapper[4632]: I1201 06:57:43.008973 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b2e713bc-63b1-4611-9f78-48a7bc34ddeb" (UID: "b2e713bc-63b1-4611-9f78-48a7bc34ddeb"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:43 crc kubenswrapper[4632]: I1201 06:57:43.013281 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-config" (OuterVolumeSpecName: "config") pod "b2e713bc-63b1-4611-9f78-48a7bc34ddeb" (UID: "b2e713bc-63b1-4611-9f78-48a7bc34ddeb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:43 crc kubenswrapper[4632]: I1201 06:57:43.013565 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b2e713bc-63b1-4611-9f78-48a7bc34ddeb" (UID: "b2e713bc-63b1-4611-9f78-48a7bc34ddeb"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:43 crc kubenswrapper[4632]: I1201 06:57:43.048680 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b2e713bc-63b1-4611-9f78-48a7bc34ddeb" (UID: "b2e713bc-63b1-4611-9f78-48a7bc34ddeb"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:43 crc kubenswrapper[4632]: I1201 06:57:43.067075 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:43 crc kubenswrapper[4632]: I1201 06:57:43.067121 4632 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:43 crc kubenswrapper[4632]: I1201 06:57:43.067134 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q684b\" (UniqueName: \"kubernetes.io/projected/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-kube-api-access-q684b\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:43 crc kubenswrapper[4632]: I1201 06:57:43.067147 4632 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:43 crc kubenswrapper[4632]: I1201 06:57:43.067156 4632 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:43 crc kubenswrapper[4632]: I1201 06:57:43.067181 4632 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b2e713bc-63b1-4611-9f78-48a7bc34ddeb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:43 crc kubenswrapper[4632]: I1201 06:57:43.184657 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 06:57:43 crc kubenswrapper[4632]: W1201 06:57:43.193135 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7f8691a1_4440_42f2_a472_ed03ac4f61b6.slice/crio-117f53cee2825ad0177d416712219c4f541317e20a669831f67a4c1073fc3baa WatchSource:0}: Error finding container 
117f53cee2825ad0177d416712219c4f541317e20a669831f67a4c1073fc3baa: Status 404 returned error can't find the container with id 117f53cee2825ad0177d416712219c4f541317e20a669831f67a4c1073fc3baa Dec 01 06:57:43 crc kubenswrapper[4632]: I1201 06:57:43.338189 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76985944d7-fsstq" event={"ID":"b2e713bc-63b1-4611-9f78-48a7bc34ddeb","Type":"ContainerDied","Data":"c210ee06609eb7108481c4896a3f4b1c112edf0af6923d78777b4c665cd4a8ed"} Dec 01 06:57:43 crc kubenswrapper[4632]: I1201 06:57:43.338516 4632 scope.go:117] "RemoveContainer" containerID="f6b6adaa2fcb3b003d47989a62174c9392028ace6cc22c04d47b7fdb0c3a8de9" Dec 01 06:57:43 crc kubenswrapper[4632]: I1201 06:57:43.338252 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76985944d7-fsstq" Dec 01 06:57:43 crc kubenswrapper[4632]: I1201 06:57:43.362827 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9f344c97-e869-4a69-b084-bd94330e7e79","Type":"ContainerStarted","Data":"38529b14f6674eccd6aea9f55ebc54c4181d8d558bc98579fdc3c7c890a0d2d0"} Dec 01 06:57:43 crc kubenswrapper[4632]: I1201 06:57:43.365319 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7f8691a1-4440-42f2-a472-ed03ac4f61b6","Type":"ContainerStarted","Data":"117f53cee2825ad0177d416712219c4f541317e20a669831f67a4c1073fc3baa"} Dec 01 06:57:43 crc kubenswrapper[4632]: I1201 06:57:43.370113 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76cff69bf5-b4scl" event={"ID":"42ac38c3-780c-44b4-9d50-aa59cdf15703","Type":"ContainerStarted","Data":"3715db80686bc9c87d6ca539617efe657208af1e85ad7c30266974cef42299e8"} Dec 01 06:57:43 crc kubenswrapper[4632]: I1201 06:57:43.370479 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-76cff69bf5-b4scl" Dec 01 06:57:43 crc kubenswrapper[4632]: I1201 06:57:43.408799 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76985944d7-fsstq"] Dec 01 06:57:43 crc kubenswrapper[4632]: I1201 06:57:43.416865 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-76985944d7-fsstq"] Dec 01 06:57:43 crc kubenswrapper[4632]: I1201 06:57:43.419910 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-76cff69bf5-b4scl" podStartSLOduration=3.419896541 podStartE2EDuration="3.419896541s" podCreationTimestamp="2025-12-01 06:57:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:57:43.408636199 +0000 UTC m=+872.973649171" watchObservedRunningTime="2025-12-01 06:57:43.419896541 +0000 UTC m=+872.984909515" Dec 01 06:57:44 crc kubenswrapper[4632]: I1201 06:57:44.396203 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7f8691a1-4440-42f2-a472-ed03ac4f61b6","Type":"ContainerStarted","Data":"55e33435c6dcc70fae7690563b9fd9b7f72a8ceb3a8b5716666e9c2d3fcf92d9"} Dec 01 06:57:44 crc kubenswrapper[4632]: I1201 06:57:44.459667 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9f344c97-e869-4a69-b084-bd94330e7e79","Type":"ContainerStarted","Data":"6117ee822881c7bc7f3377a1f925c942768481ddf6eac4227615f2b8ecd95bc5"} Dec 01 06:57:44 crc 
kubenswrapper[4632]: I1201 06:57:44.767216 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2e713bc-63b1-4611-9f78-48a7bc34ddeb" path="/var/lib/kubelet/pods/b2e713bc-63b1-4611-9f78-48a7bc34ddeb/volumes" Dec 01 06:57:45 crc kubenswrapper[4632]: I1201 06:57:45.476513 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7f8691a1-4440-42f2-a472-ed03ac4f61b6","Type":"ContainerStarted","Data":"db661bee75d522dba94bc8b09832ff78effe276b958f016a2cc24c1fa0423892"} Dec 01 06:57:45 crc kubenswrapper[4632]: I1201 06:57:45.476585 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="7f8691a1-4440-42f2-a472-ed03ac4f61b6" containerName="glance-log" containerID="cri-o://55e33435c6dcc70fae7690563b9fd9b7f72a8ceb3a8b5716666e9c2d3fcf92d9" gracePeriod=30 Dec 01 06:57:45 crc kubenswrapper[4632]: I1201 06:57:45.476921 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="7f8691a1-4440-42f2-a472-ed03ac4f61b6" containerName="glance-httpd" containerID="cri-o://db661bee75d522dba94bc8b09832ff78effe276b958f016a2cc24c1fa0423892" gracePeriod=30 Dec 01 06:57:45 crc kubenswrapper[4632]: I1201 06:57:45.493614 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9f344c97-e869-4a69-b084-bd94330e7e79","Type":"ContainerStarted","Data":"85c79dbc17b734ee913b2c055c283d6d3fa0767e470a2ee53dc85517dc378592"} Dec 01 06:57:45 crc kubenswrapper[4632]: I1201 06:57:45.493743 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="9f344c97-e869-4a69-b084-bd94330e7e79" containerName="glance-log" containerID="cri-o://6117ee822881c7bc7f3377a1f925c942768481ddf6eac4227615f2b8ecd95bc5" gracePeriod=30 Dec 01 06:57:45 crc kubenswrapper[4632]: I1201 06:57:45.493961 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="9f344c97-e869-4a69-b084-bd94330e7e79" containerName="glance-httpd" containerID="cri-o://85c79dbc17b734ee913b2c055c283d6d3fa0767e470a2ee53dc85517dc378592" gracePeriod=30 Dec 01 06:57:45 crc kubenswrapper[4632]: I1201 06:57:45.523964 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.523952688 podStartE2EDuration="5.523952688s" podCreationTimestamp="2025-12-01 06:57:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:57:45.520777899 +0000 UTC m=+875.085790872" watchObservedRunningTime="2025-12-01 06:57:45.523952688 +0000 UTC m=+875.088965661" Dec 01 06:57:45 crc kubenswrapper[4632]: I1201 06:57:45.524628 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.524621119 podStartE2EDuration="5.524621119s" podCreationTimestamp="2025-12-01 06:57:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:57:45.502816074 +0000 UTC m=+875.067829047" watchObservedRunningTime="2025-12-01 06:57:45.524621119 +0000 UTC m=+875.089634092" Dec 01 06:57:46 crc kubenswrapper[4632]: I1201 06:57:46.507382 4632 generic.go:334] "Generic (PLEG): container 
finished" podID="1a1233c4-8d6c-46da-97f0-915f50fb429c" containerID="a8228ea7fd5bee8cdc77beba5314e95f8c03d1ba22d248e62b16d61b653888c4" exitCode=0 Dec 01 06:57:46 crc kubenswrapper[4632]: I1201 06:57:46.507641 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7ft7x" event={"ID":"1a1233c4-8d6c-46da-97f0-915f50fb429c","Type":"ContainerDied","Data":"a8228ea7fd5bee8cdc77beba5314e95f8c03d1ba22d248e62b16d61b653888c4"} Dec 01 06:57:46 crc kubenswrapper[4632]: I1201 06:57:46.511778 4632 generic.go:334] "Generic (PLEG): container finished" podID="9f344c97-e869-4a69-b084-bd94330e7e79" containerID="85c79dbc17b734ee913b2c055c283d6d3fa0767e470a2ee53dc85517dc378592" exitCode=0 Dec 01 06:57:46 crc kubenswrapper[4632]: I1201 06:57:46.511808 4632 generic.go:334] "Generic (PLEG): container finished" podID="9f344c97-e869-4a69-b084-bd94330e7e79" containerID="6117ee822881c7bc7f3377a1f925c942768481ddf6eac4227615f2b8ecd95bc5" exitCode=143 Dec 01 06:57:46 crc kubenswrapper[4632]: I1201 06:57:46.511862 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9f344c97-e869-4a69-b084-bd94330e7e79","Type":"ContainerDied","Data":"85c79dbc17b734ee913b2c055c283d6d3fa0767e470a2ee53dc85517dc378592"} Dec 01 06:57:46 crc kubenswrapper[4632]: I1201 06:57:46.511894 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9f344c97-e869-4a69-b084-bd94330e7e79","Type":"ContainerDied","Data":"6117ee822881c7bc7f3377a1f925c942768481ddf6eac4227615f2b8ecd95bc5"} Dec 01 06:57:46 crc kubenswrapper[4632]: I1201 06:57:46.515222 4632 generic.go:334] "Generic (PLEG): container finished" podID="7f8691a1-4440-42f2-a472-ed03ac4f61b6" containerID="db661bee75d522dba94bc8b09832ff78effe276b958f016a2cc24c1fa0423892" exitCode=0 Dec 01 06:57:46 crc kubenswrapper[4632]: I1201 06:57:46.515244 4632 generic.go:334] "Generic (PLEG): container finished" podID="7f8691a1-4440-42f2-a472-ed03ac4f61b6" containerID="55e33435c6dcc70fae7690563b9fd9b7f72a8ceb3a8b5716666e9c2d3fcf92d9" exitCode=143 Dec 01 06:57:46 crc kubenswrapper[4632]: I1201 06:57:46.515265 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7f8691a1-4440-42f2-a472-ed03ac4f61b6","Type":"ContainerDied","Data":"db661bee75d522dba94bc8b09832ff78effe276b958f016a2cc24c1fa0423892"} Dec 01 06:57:46 crc kubenswrapper[4632]: I1201 06:57:46.515287 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7f8691a1-4440-42f2-a472-ed03ac4f61b6","Type":"ContainerDied","Data":"55e33435c6dcc70fae7690563b9fd9b7f72a8ceb3a8b5716666e9c2d3fcf92d9"} Dec 01 06:57:46 crc kubenswrapper[4632]: I1201 06:57:46.567761 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-dnt67" Dec 01 06:57:46 crc kubenswrapper[4632]: I1201 06:57:46.568145 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-dnt67" Dec 01 06:57:47 crc kubenswrapper[4632]: I1201 06:57:47.618650 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-dnt67" podUID="1ce86ad4-7718-4227-a97b-2e9f2dc3bc27" containerName="registry-server" probeResult="failure" output=< Dec 01 06:57:47 crc kubenswrapper[4632]: timeout: failed to connect service ":50051" within 1s Dec 01 06:57:47 crc kubenswrapper[4632]: > Dec 01 06:57:48 crc 
kubenswrapper[4632]: I1201 06:57:48.626473 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7ft7x" Dec 01 06:57:48 crc kubenswrapper[4632]: I1201 06:57:48.642640 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-credential-keys\") pod \"1a1233c4-8d6c-46da-97f0-915f50fb429c\" (UID: \"1a1233c4-8d6c-46da-97f0-915f50fb429c\") " Dec 01 06:57:48 crc kubenswrapper[4632]: I1201 06:57:48.642735 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-scripts\") pod \"1a1233c4-8d6c-46da-97f0-915f50fb429c\" (UID: \"1a1233c4-8d6c-46da-97f0-915f50fb429c\") " Dec 01 06:57:48 crc kubenswrapper[4632]: I1201 06:57:48.642855 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-combined-ca-bundle\") pod \"1a1233c4-8d6c-46da-97f0-915f50fb429c\" (UID: \"1a1233c4-8d6c-46da-97f0-915f50fb429c\") " Dec 01 06:57:48 crc kubenswrapper[4632]: I1201 06:57:48.642879 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-config-data\") pod \"1a1233c4-8d6c-46da-97f0-915f50fb429c\" (UID: \"1a1233c4-8d6c-46da-97f0-915f50fb429c\") " Dec 01 06:57:48 crc kubenswrapper[4632]: I1201 06:57:48.643122 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xn5m5\" (UniqueName: \"kubernetes.io/projected/1a1233c4-8d6c-46da-97f0-915f50fb429c-kube-api-access-xn5m5\") pod \"1a1233c4-8d6c-46da-97f0-915f50fb429c\" (UID: \"1a1233c4-8d6c-46da-97f0-915f50fb429c\") " Dec 01 06:57:48 crc kubenswrapper[4632]: I1201 06:57:48.643250 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-fernet-keys\") pod \"1a1233c4-8d6c-46da-97f0-915f50fb429c\" (UID: \"1a1233c4-8d6c-46da-97f0-915f50fb429c\") " Dec 01 06:57:48 crc kubenswrapper[4632]: I1201 06:57:48.651256 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-scripts" (OuterVolumeSpecName: "scripts") pod "1a1233c4-8d6c-46da-97f0-915f50fb429c" (UID: "1a1233c4-8d6c-46da-97f0-915f50fb429c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:57:48 crc kubenswrapper[4632]: I1201 06:57:48.656795 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "1a1233c4-8d6c-46da-97f0-915f50fb429c" (UID: "1a1233c4-8d6c-46da-97f0-915f50fb429c"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:57:48 crc kubenswrapper[4632]: I1201 06:57:48.661218 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "1a1233c4-8d6c-46da-97f0-915f50fb429c" (UID: "1a1233c4-8d6c-46da-97f0-915f50fb429c"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:57:48 crc kubenswrapper[4632]: I1201 06:57:48.672524 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a1233c4-8d6c-46da-97f0-915f50fb429c-kube-api-access-xn5m5" (OuterVolumeSpecName: "kube-api-access-xn5m5") pod "1a1233c4-8d6c-46da-97f0-915f50fb429c" (UID: "1a1233c4-8d6c-46da-97f0-915f50fb429c"). InnerVolumeSpecName "kube-api-access-xn5m5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:57:48 crc kubenswrapper[4632]: I1201 06:57:48.680675 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-config-data" (OuterVolumeSpecName: "config-data") pod "1a1233c4-8d6c-46da-97f0-915f50fb429c" (UID: "1a1233c4-8d6c-46da-97f0-915f50fb429c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:57:48 crc kubenswrapper[4632]: I1201 06:57:48.683974 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1a1233c4-8d6c-46da-97f0-915f50fb429c" (UID: "1a1233c4-8d6c-46da-97f0-915f50fb429c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:57:48 crc kubenswrapper[4632]: I1201 06:57:48.748297 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xn5m5\" (UniqueName: \"kubernetes.io/projected/1a1233c4-8d6c-46da-97f0-915f50fb429c-kube-api-access-xn5m5\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:48 crc kubenswrapper[4632]: I1201 06:57:48.748334 4632 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:48 crc kubenswrapper[4632]: I1201 06:57:48.748347 4632 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:48 crc kubenswrapper[4632]: I1201 06:57:48.748371 4632 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:48 crc kubenswrapper[4632]: I1201 06:57:48.748381 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:48 crc kubenswrapper[4632]: I1201 06:57:48.748389 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a1233c4-8d6c-46da-97f0-915f50fb429c-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.497603 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.497923 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" 
podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.545714 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-7ft7x" event={"ID":"1a1233c4-8d6c-46da-97f0-915f50fb429c","Type":"ContainerDied","Data":"53de5ead63c2af13b7b905a8663afe43bc906c8162671e1b7e628a1cc564768e"} Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.545759 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="53de5ead63c2af13b7b905a8663afe43bc906c8162671e1b7e628a1cc564768e" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.545781 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-7ft7x" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.693224 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-7ft7x"] Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.701284 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-7ft7x"] Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.810757 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-nbn5v"] Dec 01 06:57:49 crc kubenswrapper[4632]: E1201 06:57:49.811513 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a1233c4-8d6c-46da-97f0-915f50fb429c" containerName="keystone-bootstrap" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.811538 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a1233c4-8d6c-46da-97f0-915f50fb429c" containerName="keystone-bootstrap" Dec 01 06:57:49 crc kubenswrapper[4632]: E1201 06:57:49.811592 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2e713bc-63b1-4611-9f78-48a7bc34ddeb" containerName="init" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.811598 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2e713bc-63b1-4611-9f78-48a7bc34ddeb" containerName="init" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.811846 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2e713bc-63b1-4611-9f78-48a7bc34ddeb" containerName="init" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.811859 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a1233c4-8d6c-46da-97f0-915f50fb429c" containerName="keystone-bootstrap" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.812645 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-nbn5v" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.815097 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.815287 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.815780 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.816412 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-ll652" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.816674 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.820718 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-nbn5v"] Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.888435 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-fernet-keys\") pod \"keystone-bootstrap-nbn5v\" (UID: \"64108b12-b957-4e10-be96-e49cab11acdc\") " pod="openstack/keystone-bootstrap-nbn5v" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.888517 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-credential-keys\") pod \"keystone-bootstrap-nbn5v\" (UID: \"64108b12-b957-4e10-be96-e49cab11acdc\") " pod="openstack/keystone-bootstrap-nbn5v" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.888554 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-config-data\") pod \"keystone-bootstrap-nbn5v\" (UID: \"64108b12-b957-4e10-be96-e49cab11acdc\") " pod="openstack/keystone-bootstrap-nbn5v" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.888618 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-scripts\") pod \"keystone-bootstrap-nbn5v\" (UID: \"64108b12-b957-4e10-be96-e49cab11acdc\") " pod="openstack/keystone-bootstrap-nbn5v" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.888804 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-combined-ca-bundle\") pod \"keystone-bootstrap-nbn5v\" (UID: \"64108b12-b957-4e10-be96-e49cab11acdc\") " pod="openstack/keystone-bootstrap-nbn5v" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.888909 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7642\" (UniqueName: \"kubernetes.io/projected/64108b12-b957-4e10-be96-e49cab11acdc-kube-api-access-z7642\") pod \"keystone-bootstrap-nbn5v\" (UID: \"64108b12-b957-4e10-be96-e49cab11acdc\") " pod="openstack/keystone-bootstrap-nbn5v" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.991253 4632 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-scripts\") pod \"keystone-bootstrap-nbn5v\" (UID: \"64108b12-b957-4e10-be96-e49cab11acdc\") " pod="openstack/keystone-bootstrap-nbn5v" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.991400 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-combined-ca-bundle\") pod \"keystone-bootstrap-nbn5v\" (UID: \"64108b12-b957-4e10-be96-e49cab11acdc\") " pod="openstack/keystone-bootstrap-nbn5v" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.991468 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7642\" (UniqueName: \"kubernetes.io/projected/64108b12-b957-4e10-be96-e49cab11acdc-kube-api-access-z7642\") pod \"keystone-bootstrap-nbn5v\" (UID: \"64108b12-b957-4e10-be96-e49cab11acdc\") " pod="openstack/keystone-bootstrap-nbn5v" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.991612 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-fernet-keys\") pod \"keystone-bootstrap-nbn5v\" (UID: \"64108b12-b957-4e10-be96-e49cab11acdc\") " pod="openstack/keystone-bootstrap-nbn5v" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.991696 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-credential-keys\") pod \"keystone-bootstrap-nbn5v\" (UID: \"64108b12-b957-4e10-be96-e49cab11acdc\") " pod="openstack/keystone-bootstrap-nbn5v" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.991745 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-config-data\") pod \"keystone-bootstrap-nbn5v\" (UID: \"64108b12-b957-4e10-be96-e49cab11acdc\") " pod="openstack/keystone-bootstrap-nbn5v" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.996294 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-scripts\") pod \"keystone-bootstrap-nbn5v\" (UID: \"64108b12-b957-4e10-be96-e49cab11acdc\") " pod="openstack/keystone-bootstrap-nbn5v" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.998228 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-fernet-keys\") pod \"keystone-bootstrap-nbn5v\" (UID: \"64108b12-b957-4e10-be96-e49cab11acdc\") " pod="openstack/keystone-bootstrap-nbn5v" Dec 01 06:57:49 crc kubenswrapper[4632]: I1201 06:57:49.998781 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-config-data\") pod \"keystone-bootstrap-nbn5v\" (UID: \"64108b12-b957-4e10-be96-e49cab11acdc\") " pod="openstack/keystone-bootstrap-nbn5v" Dec 01 06:57:50 crc kubenswrapper[4632]: I1201 06:57:50.004796 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-credential-keys\") pod \"keystone-bootstrap-nbn5v\" (UID: \"64108b12-b957-4e10-be96-e49cab11acdc\") " 
pod="openstack/keystone-bootstrap-nbn5v" Dec 01 06:57:50 crc kubenswrapper[4632]: I1201 06:57:50.010075 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7642\" (UniqueName: \"kubernetes.io/projected/64108b12-b957-4e10-be96-e49cab11acdc-kube-api-access-z7642\") pod \"keystone-bootstrap-nbn5v\" (UID: \"64108b12-b957-4e10-be96-e49cab11acdc\") " pod="openstack/keystone-bootstrap-nbn5v" Dec 01 06:57:50 crc kubenswrapper[4632]: I1201 06:57:50.010709 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-combined-ca-bundle\") pod \"keystone-bootstrap-nbn5v\" (UID: \"64108b12-b957-4e10-be96-e49cab11acdc\") " pod="openstack/keystone-bootstrap-nbn5v" Dec 01 06:57:50 crc kubenswrapper[4632]: I1201 06:57:50.130921 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-nbn5v" Dec 01 06:57:50 crc kubenswrapper[4632]: E1201 06:57:50.351794 4632 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod57384d37_fe8e_4534_b99f_579737abcab7.slice/crio-415a7ab3b8973040458a0bc2b77217ebe5a8d21230f0ba79b29cbb2d16521e8c.scope\": RecentStats: unable to find data in memory cache]" Dec 01 06:57:50 crc kubenswrapper[4632]: I1201 06:57:50.555787 4632 generic.go:334] "Generic (PLEG): container finished" podID="57384d37-fe8e-4534-b99f-579737abcab7" containerID="415a7ab3b8973040458a0bc2b77217ebe5a8d21230f0ba79b29cbb2d16521e8c" exitCode=0 Dec 01 06:57:50 crc kubenswrapper[4632]: I1201 06:57:50.555937 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4rvbb" event={"ID":"57384d37-fe8e-4534-b99f-579737abcab7","Type":"ContainerDied","Data":"415a7ab3b8973040458a0bc2b77217ebe5a8d21230f0ba79b29cbb2d16521e8c"} Dec 01 06:57:50 crc kubenswrapper[4632]: I1201 06:57:50.762389 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a1233c4-8d6c-46da-97f0-915f50fb429c" path="/var/lib/kubelet/pods/1a1233c4-8d6c-46da-97f0-915f50fb429c/volumes" Dec 01 06:57:51 crc kubenswrapper[4632]: I1201 06:57:51.098493 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-76cff69bf5-b4scl" Dec 01 06:57:51 crc kubenswrapper[4632]: I1201 06:57:51.174630 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74ccfcf59-pw9tv"] Dec 01 06:57:51 crc kubenswrapper[4632]: I1201 06:57:51.174871 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv" podUID="bd873704-df6d-4418-ba91-4ecf29de059f" containerName="dnsmasq-dns" containerID="cri-o://16e4cb4e10f1dcd6f09481ea16c2622eeb770565ca91a8bf7a1fcc13dce3d022" gracePeriod=10 Dec 01 06:57:51 crc kubenswrapper[4632]: I1201 06:57:51.567512 4632 generic.go:334] "Generic (PLEG): container finished" podID="bd873704-df6d-4418-ba91-4ecf29de059f" containerID="16e4cb4e10f1dcd6f09481ea16c2622eeb770565ca91a8bf7a1fcc13dce3d022" exitCode=0 Dec 01 06:57:51 crc kubenswrapper[4632]: I1201 06:57:51.567594 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv" event={"ID":"bd873704-df6d-4418-ba91-4ecf29de059f","Type":"ContainerDied","Data":"16e4cb4e10f1dcd6f09481ea16c2622eeb770565ca91a8bf7a1fcc13dce3d022"} Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.512625 4632 
prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv" podUID="bd873704-df6d-4418-ba91-4ecf29de059f" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.122:5353: connect: connection refused" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.700700 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.712094 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.749060 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f344c97-e869-4a69-b084-bd94330e7e79-scripts\") pod \"9f344c97-e869-4a69-b084-bd94330e7e79\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.749492 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f344c97-e869-4a69-b084-bd94330e7e79-internal-tls-certs\") pod \"9f344c97-e869-4a69-b084-bd94330e7e79\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.749561 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzclj\" (UniqueName: \"kubernetes.io/projected/7f8691a1-4440-42f2-a472-ed03ac4f61b6-kube-api-access-kzclj\") pod \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.749596 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"9f344c97-e869-4a69-b084-bd94330e7e79\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.749680 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7f8691a1-4440-42f2-a472-ed03ac4f61b6-httpd-run\") pod \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.749748 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7f8691a1-4440-42f2-a472-ed03ac4f61b6-logs\") pod \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.749780 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f8691a1-4440-42f2-a472-ed03ac4f61b6-combined-ca-bundle\") pod \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.749804 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f8691a1-4440-42f2-a472-ed03ac4f61b6-scripts\") pod \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.749836 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" 
(UniqueName: \"kubernetes.io/empty-dir/9f344c97-e869-4a69-b084-bd94330e7e79-logs\") pod \"9f344c97-e869-4a69-b084-bd94330e7e79\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.749864 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f344c97-e869-4a69-b084-bd94330e7e79-combined-ca-bundle\") pod \"9f344c97-e869-4a69-b084-bd94330e7e79\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.749946 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7f8691a1-4440-42f2-a472-ed03ac4f61b6-public-tls-certs\") pod \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.750023 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f344c97-e869-4a69-b084-bd94330e7e79-config-data\") pod \"9f344c97-e869-4a69-b084-bd94330e7e79\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.750102 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f8691a1-4440-42f2-a472-ed03ac4f61b6-config-data\") pod \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.750207 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9f344c97-e869-4a69-b084-bd94330e7e79-httpd-run\") pod \"9f344c97-e869-4a69-b084-bd94330e7e79\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.750385 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qjqx6\" (UniqueName: \"kubernetes.io/projected/9f344c97-e869-4a69-b084-bd94330e7e79-kube-api-access-qjqx6\") pod \"9f344c97-e869-4a69-b084-bd94330e7e79\" (UID: \"9f344c97-e869-4a69-b084-bd94330e7e79\") " Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.750687 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\" (UID: \"7f8691a1-4440-42f2-a472-ed03ac4f61b6\") " Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.753739 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f344c97-e869-4a69-b084-bd94330e7e79-scripts" (OuterVolumeSpecName: "scripts") pod "9f344c97-e869-4a69-b084-bd94330e7e79" (UID: "9f344c97-e869-4a69-b084-bd94330e7e79"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.754058 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f8691a1-4440-42f2-a472-ed03ac4f61b6-logs" (OuterVolumeSpecName: "logs") pod "7f8691a1-4440-42f2-a472-ed03ac4f61b6" (UID: "7f8691a1-4440-42f2-a472-ed03ac4f61b6"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.754283 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f8691a1-4440-42f2-a472-ed03ac4f61b6-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "7f8691a1-4440-42f2-a472-ed03ac4f61b6" (UID: "7f8691a1-4440-42f2-a472-ed03ac4f61b6"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.754509 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f344c97-e869-4a69-b084-bd94330e7e79-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "9f344c97-e869-4a69-b084-bd94330e7e79" (UID: "9f344c97-e869-4a69-b084-bd94330e7e79"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.756319 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "7f8691a1-4440-42f2-a472-ed03ac4f61b6" (UID: "7f8691a1-4440-42f2-a472-ed03ac4f61b6"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.759674 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "9f344c97-e869-4a69-b084-bd94330e7e79" (UID: "9f344c97-e869-4a69-b084-bd94330e7e79"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.760402 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f344c97-e869-4a69-b084-bd94330e7e79-logs" (OuterVolumeSpecName: "logs") pod "9f344c97-e869-4a69-b084-bd94330e7e79" (UID: "9f344c97-e869-4a69-b084-bd94330e7e79"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.762934 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f344c97-e869-4a69-b084-bd94330e7e79-kube-api-access-qjqx6" (OuterVolumeSpecName: "kube-api-access-qjqx6") pod "9f344c97-e869-4a69-b084-bd94330e7e79" (UID: "9f344c97-e869-4a69-b084-bd94330e7e79"). InnerVolumeSpecName "kube-api-access-qjqx6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.766506 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f8691a1-4440-42f2-a472-ed03ac4f61b6-scripts" (OuterVolumeSpecName: "scripts") pod "7f8691a1-4440-42f2-a472-ed03ac4f61b6" (UID: "7f8691a1-4440-42f2-a472-ed03ac4f61b6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.776701 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f8691a1-4440-42f2-a472-ed03ac4f61b6-kube-api-access-kzclj" (OuterVolumeSpecName: "kube-api-access-kzclj") pod "7f8691a1-4440-42f2-a472-ed03ac4f61b6" (UID: "7f8691a1-4440-42f2-a472-ed03ac4f61b6"). InnerVolumeSpecName "kube-api-access-kzclj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.788520 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f8691a1-4440-42f2-a472-ed03ac4f61b6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7f8691a1-4440-42f2-a472-ed03ac4f61b6" (UID: "7f8691a1-4440-42f2-a472-ed03ac4f61b6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.791630 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f344c97-e869-4a69-b084-bd94330e7e79-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9f344c97-e869-4a69-b084-bd94330e7e79" (UID: "9f344c97-e869-4a69-b084-bd94330e7e79"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.813916 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f344c97-e869-4a69-b084-bd94330e7e79-config-data" (OuterVolumeSpecName: "config-data") pod "9f344c97-e869-4a69-b084-bd94330e7e79" (UID: "9f344c97-e869-4a69-b084-bd94330e7e79"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.814401 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f344c97-e869-4a69-b084-bd94330e7e79-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "9f344c97-e869-4a69-b084-bd94330e7e79" (UID: "9f344c97-e869-4a69-b084-bd94330e7e79"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.814708 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f8691a1-4440-42f2-a472-ed03ac4f61b6-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "7f8691a1-4440-42f2-a472-ed03ac4f61b6" (UID: "7f8691a1-4440-42f2-a472-ed03ac4f61b6"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.822651 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f8691a1-4440-42f2-a472-ed03ac4f61b6-config-data" (OuterVolumeSpecName: "config-data") pod "7f8691a1-4440-42f2-a472-ed03ac4f61b6" (UID: "7f8691a1-4440-42f2-a472-ed03ac4f61b6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.853843 4632 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f344c97-e869-4a69-b084-bd94330e7e79-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.853874 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzclj\" (UniqueName: \"kubernetes.io/projected/7f8691a1-4440-42f2-a472-ed03ac4f61b6-kube-api-access-kzclj\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.853906 4632 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.853916 4632 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7f8691a1-4440-42f2-a472-ed03ac4f61b6-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.853929 4632 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7f8691a1-4440-42f2-a472-ed03ac4f61b6-logs\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.853938 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7f8691a1-4440-42f2-a472-ed03ac4f61b6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.853948 4632 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7f8691a1-4440-42f2-a472-ed03ac4f61b6-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.853958 4632 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f344c97-e869-4a69-b084-bd94330e7e79-logs\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.853967 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f344c97-e869-4a69-b084-bd94330e7e79-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.853977 4632 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7f8691a1-4440-42f2-a472-ed03ac4f61b6-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.853987 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f344c97-e869-4a69-b084-bd94330e7e79-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.853996 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7f8691a1-4440-42f2-a472-ed03ac4f61b6-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.854005 4632 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9f344c97-e869-4a69-b084-bd94330e7e79-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.854014 4632 reconciler_common.go:293] 
"Volume detached for volume \"kube-api-access-qjqx6\" (UniqueName: \"kubernetes.io/projected/9f344c97-e869-4a69-b084-bd94330e7e79-kube-api-access-qjqx6\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.854029 4632 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.854047 4632 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f344c97-e869-4a69-b084-bd94330e7e79-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.867475 4632 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.868389 4632 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.956723 4632 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:52 crc kubenswrapper[4632]: I1201 06:57:52.956749 4632 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.173755 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-4rvbb" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.214927 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.262990 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-dns-svc\") pod \"bd873704-df6d-4418-ba91-4ecf29de059f\" (UID: \"bd873704-df6d-4418-ba91-4ecf29de059f\") " Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.263277 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57384d37-fe8e-4534-b99f-579737abcab7-combined-ca-bundle\") pod \"57384d37-fe8e-4534-b99f-579737abcab7\" (UID: \"57384d37-fe8e-4534-b99f-579737abcab7\") " Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.263329 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-ovsdbserver-sb\") pod \"bd873704-df6d-4418-ba91-4ecf29de059f\" (UID: \"bd873704-df6d-4418-ba91-4ecf29de059f\") " Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.263379 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-dns-swift-storage-0\") pod \"bd873704-df6d-4418-ba91-4ecf29de059f\" (UID: \"bd873704-df6d-4418-ba91-4ecf29de059f\") " Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.263422 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wg5f\" (UniqueName: \"kubernetes.io/projected/57384d37-fe8e-4534-b99f-579737abcab7-kube-api-access-6wg5f\") pod \"57384d37-fe8e-4534-b99f-579737abcab7\" (UID: \"57384d37-fe8e-4534-b99f-579737abcab7\") " Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.263594 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x5vm6\" (UniqueName: \"kubernetes.io/projected/bd873704-df6d-4418-ba91-4ecf29de059f-kube-api-access-x5vm6\") pod \"bd873704-df6d-4418-ba91-4ecf29de059f\" (UID: \"bd873704-df6d-4418-ba91-4ecf29de059f\") " Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.263704 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-config\") pod \"bd873704-df6d-4418-ba91-4ecf29de059f\" (UID: \"bd873704-df6d-4418-ba91-4ecf29de059f\") " Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.263729 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-ovsdbserver-nb\") pod \"bd873704-df6d-4418-ba91-4ecf29de059f\" (UID: \"bd873704-df6d-4418-ba91-4ecf29de059f\") " Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.263747 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/57384d37-fe8e-4534-b99f-579737abcab7-config\") pod \"57384d37-fe8e-4534-b99f-579737abcab7\" (UID: \"57384d37-fe8e-4534-b99f-579737abcab7\") " Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.268277 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57384d37-fe8e-4534-b99f-579737abcab7-kube-api-access-6wg5f" (OuterVolumeSpecName: "kube-api-access-6wg5f") pod 
"57384d37-fe8e-4534-b99f-579737abcab7" (UID: "57384d37-fe8e-4534-b99f-579737abcab7"). InnerVolumeSpecName "kube-api-access-6wg5f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.296581 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd873704-df6d-4418-ba91-4ecf29de059f-kube-api-access-x5vm6" (OuterVolumeSpecName: "kube-api-access-x5vm6") pod "bd873704-df6d-4418-ba91-4ecf29de059f" (UID: "bd873704-df6d-4418-ba91-4ecf29de059f"). InnerVolumeSpecName "kube-api-access-x5vm6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.303841 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57384d37-fe8e-4534-b99f-579737abcab7-config" (OuterVolumeSpecName: "config") pod "57384d37-fe8e-4534-b99f-579737abcab7" (UID: "57384d37-fe8e-4534-b99f-579737abcab7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.311760 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57384d37-fe8e-4534-b99f-579737abcab7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "57384d37-fe8e-4534-b99f-579737abcab7" (UID: "57384d37-fe8e-4534-b99f-579737abcab7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.348979 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bd873704-df6d-4418-ba91-4ecf29de059f" (UID: "bd873704-df6d-4418-ba91-4ecf29de059f"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.368029 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wg5f\" (UniqueName: \"kubernetes.io/projected/57384d37-fe8e-4534-b99f-579737abcab7-kube-api-access-6wg5f\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.368068 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x5vm6\" (UniqueName: \"kubernetes.io/projected/bd873704-df6d-4418-ba91-4ecf29de059f-kube-api-access-x5vm6\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.368079 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/57384d37-fe8e-4534-b99f-579737abcab7-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.368089 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57384d37-fe8e-4534-b99f-579737abcab7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.368098 4632 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.381189 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "bd873704-df6d-4418-ba91-4ecf29de059f" (UID: "bd873704-df6d-4418-ba91-4ecf29de059f"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.385367 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-config" (OuterVolumeSpecName: "config") pod "bd873704-df6d-4418-ba91-4ecf29de059f" (UID: "bd873704-df6d-4418-ba91-4ecf29de059f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.409781 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bd873704-df6d-4418-ba91-4ecf29de059f" (UID: "bd873704-df6d-4418-ba91-4ecf29de059f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.410941 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bd873704-df6d-4418-ba91-4ecf29de059f" (UID: "bd873704-df6d-4418-ba91-4ecf29de059f"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.470145 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.470180 4632 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.470193 4632 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.470203 4632 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bd873704-df6d-4418-ba91-4ecf29de059f-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.588213 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"7f8691a1-4440-42f2-a472-ed03ac4f61b6","Type":"ContainerDied","Data":"117f53cee2825ad0177d416712219c4f541317e20a669831f67a4c1073fc3baa"} Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.588471 4632 scope.go:117] "RemoveContainer" containerID="db661bee75d522dba94bc8b09832ff78effe276b958f016a2cc24c1fa0423892" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.588244 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.591855 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-bqchp" event={"ID":"17a28608-4b5e-435b-868f-0c9cc98f7c91","Type":"ContainerStarted","Data":"9a1fbceb62690412f3971e3668768c4c4764e31aed7fc2504b7611591da48db9"} Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.619419 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"9f344c97-e869-4a69-b084-bd94330e7e79","Type":"ContainerDied","Data":"38529b14f6674eccd6aea9f55ebc54c4181d8d558bc98579fdc3c7c890a0d2d0"} Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.619612 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.623795 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-nbn5v"] Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.627000 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-4rvbb" event={"ID":"57384d37-fe8e-4534-b99f-579737abcab7","Type":"ContainerDied","Data":"ae35a2561dc4e63547cdd7f0a7012f15339f4f7e5891b73b63086e7e23599505"} Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.627046 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ae35a2561dc4e63547cdd7f0a7012f15339f4f7e5891b73b63086e7e23599505" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.627007 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-4rvbb" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.631991 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-xvgfc" event={"ID":"803c9fb6-6650-4865-b7f1-ed485299302f","Type":"ContainerStarted","Data":"c7ac4a849f34520e3c55784649c1f2efb713a2dd22885c3fa6ab81afe2f36b63"} Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.635209 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-bqchp" podStartSLOduration=2.215515661 podStartE2EDuration="13.63518338s" podCreationTimestamp="2025-12-01 06:57:40 +0000 UTC" firstStartedPulling="2025-12-01 06:57:41.787214286 +0000 UTC m=+871.352227259" lastFinishedPulling="2025-12-01 06:57:53.206882004 +0000 UTC m=+882.771894978" observedRunningTime="2025-12-01 06:57:53.620930643 +0000 UTC m=+883.185943616" watchObservedRunningTime="2025-12-01 06:57:53.63518338 +0000 UTC m=+883.200196353" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.635570 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08137779-8c6e-49ad-b911-7d4a952d5391","Type":"ContainerStarted","Data":"f2029f8f9bd4697a543f7bbf87ff14732bba443d93f7d087838e7419cccb465e"} Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.638624 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv" event={"ID":"bd873704-df6d-4418-ba91-4ecf29de059f","Type":"ContainerDied","Data":"cbd3a8a4998c297d45b14ec49e88ab4352af4a566838907ede1e41ddfd06ce39"} Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.638758 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74ccfcf59-pw9tv" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.675549 4632 scope.go:117] "RemoveContainer" containerID="55e33435c6dcc70fae7690563b9fd9b7f72a8ceb3a8b5716666e9c2d3fcf92d9" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.677578 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-xvgfc" podStartSLOduration=2.099657046 podStartE2EDuration="13.677562889s" podCreationTimestamp="2025-12-01 06:57:40 +0000 UTC" firstStartedPulling="2025-12-01 06:57:41.639497158 +0000 UTC m=+871.204510132" lastFinishedPulling="2025-12-01 06:57:53.217403002 +0000 UTC m=+882.782415975" observedRunningTime="2025-12-01 06:57:53.647990129 +0000 UTC m=+883.213003103" watchObservedRunningTime="2025-12-01 06:57:53.677562889 +0000 UTC m=+883.242575862" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.706483 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.720431 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.732100 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 06:57:53 crc kubenswrapper[4632]: E1201 06:57:53.732616 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd873704-df6d-4418-ba91-4ecf29de059f" containerName="dnsmasq-dns" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.732637 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd873704-df6d-4418-ba91-4ecf29de059f" containerName="dnsmasq-dns" Dec 01 06:57:53 crc kubenswrapper[4632]: E1201 06:57:53.732650 4632 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="9f344c97-e869-4a69-b084-bd94330e7e79" containerName="glance-log" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.732657 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f344c97-e869-4a69-b084-bd94330e7e79" containerName="glance-log" Dec 01 06:57:53 crc kubenswrapper[4632]: E1201 06:57:53.732671 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f8691a1-4440-42f2-a472-ed03ac4f61b6" containerName="glance-httpd" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.732677 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f8691a1-4440-42f2-a472-ed03ac4f61b6" containerName="glance-httpd" Dec 01 06:57:53 crc kubenswrapper[4632]: E1201 06:57:53.732687 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57384d37-fe8e-4534-b99f-579737abcab7" containerName="neutron-db-sync" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.732692 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="57384d37-fe8e-4534-b99f-579737abcab7" containerName="neutron-db-sync" Dec 01 06:57:53 crc kubenswrapper[4632]: E1201 06:57:53.732705 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f8691a1-4440-42f2-a472-ed03ac4f61b6" containerName="glance-log" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.732710 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f8691a1-4440-42f2-a472-ed03ac4f61b6" containerName="glance-log" Dec 01 06:57:53 crc kubenswrapper[4632]: E1201 06:57:53.732721 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd873704-df6d-4418-ba91-4ecf29de059f" containerName="init" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.732726 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd873704-df6d-4418-ba91-4ecf29de059f" containerName="init" Dec 01 06:57:53 crc kubenswrapper[4632]: E1201 06:57:53.732747 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f344c97-e869-4a69-b084-bd94330e7e79" containerName="glance-httpd" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.732752 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f344c97-e869-4a69-b084-bd94330e7e79" containerName="glance-httpd" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.733120 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f8691a1-4440-42f2-a472-ed03ac4f61b6" containerName="glance-httpd" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.733138 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="57384d37-fe8e-4534-b99f-579737abcab7" containerName="neutron-db-sync" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.733148 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f344c97-e869-4a69-b084-bd94330e7e79" containerName="glance-log" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.733166 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f8691a1-4440-42f2-a472-ed03ac4f61b6" containerName="glance-log" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.733182 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd873704-df6d-4418-ba91-4ecf29de059f" containerName="dnsmasq-dns" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.733196 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f344c97-e869-4a69-b084-bd94330e7e79" containerName="glance-httpd" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.734872 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.737335 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.740924 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.741209 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-8f9x2" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.741339 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.769188 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74ccfcf59-pw9tv"] Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.778921 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-74ccfcf59-pw9tv"] Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.797075 4632 scope.go:117] "RemoveContainer" containerID="85c79dbc17b734ee913b2c055c283d6d3fa0767e470a2ee53dc85517dc378592" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.797211 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.810470 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.818125 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.831730 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.833916 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.841892 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.844265 4632 scope.go:117] "RemoveContainer" containerID="6117ee822881c7bc7f3377a1f925c942768481ddf6eac4227615f2b8ecd95bc5" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.845196 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.845419 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.876323 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6717d9d-3b42-4b81-a25d-84d3b3170d55-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " pod="openstack/glance-default-external-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.876406 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6717d9d-3b42-4b81-a25d-84d3b3170d55-config-data\") pod \"glance-default-external-api-0\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " pod="openstack/glance-default-external-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.876435 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4w9lr\" (UniqueName: \"kubernetes.io/projected/c6717d9d-3b42-4b81-a25d-84d3b3170d55-kube-api-access-4w9lr\") pod \"glance-default-external-api-0\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " pod="openstack/glance-default-external-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.876485 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6717d9d-3b42-4b81-a25d-84d3b3170d55-scripts\") pod \"glance-default-external-api-0\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " pod="openstack/glance-default-external-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.876504 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6717d9d-3b42-4b81-a25d-84d3b3170d55-logs\") pod \"glance-default-external-api-0\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " pod="openstack/glance-default-external-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.876584 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c6717d9d-3b42-4b81-a25d-84d3b3170d55-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " pod="openstack/glance-default-external-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.876626 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " 
pod="openstack/glance-default-external-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.876667 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6717d9d-3b42-4b81-a25d-84d3b3170d55-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " pod="openstack/glance-default-external-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.892635 4632 scope.go:117] "RemoveContainer" containerID="16e4cb4e10f1dcd6f09481ea16c2622eeb770565ca91a8bf7a1fcc13dce3d022" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.914903 4632 scope.go:117] "RemoveContainer" containerID="2680c8ca0faf875e6b868f3b99892f2fb7b6a3473a633b598d6299435bca6919" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.980163 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6717d9d-3b42-4b81-a25d-84d3b3170d55-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " pod="openstack/glance-default-external-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.980860 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1206021-ebee-4c1a-a195-a0ca8c95b324-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.980942 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1206021-ebee-4c1a-a195-a0ca8c95b324-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.981005 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d1206021-ebee-4c1a-a195-a0ca8c95b324-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.981190 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1206021-ebee-4c1a-a195-a0ca8c95b324-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.981243 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6717d9d-3b42-4b81-a25d-84d3b3170d55-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " pod="openstack/glance-default-external-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.981265 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d1206021-ebee-4c1a-a195-a0ca8c95b324-httpd-run\") pod \"glance-default-internal-api-0\" (UID: 
\"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.981299 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czgm8\" (UniqueName: \"kubernetes.io/projected/d1206021-ebee-4c1a-a195-a0ca8c95b324-kube-api-access-czgm8\") pod \"glance-default-internal-api-0\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.981370 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6717d9d-3b42-4b81-a25d-84d3b3170d55-config-data\") pod \"glance-default-external-api-0\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " pod="openstack/glance-default-external-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.981408 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4w9lr\" (UniqueName: \"kubernetes.io/projected/c6717d9d-3b42-4b81-a25d-84d3b3170d55-kube-api-access-4w9lr\") pod \"glance-default-external-api-0\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " pod="openstack/glance-default-external-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.981477 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6717d9d-3b42-4b81-a25d-84d3b3170d55-scripts\") pod \"glance-default-external-api-0\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " pod="openstack/glance-default-external-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.981499 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.981519 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6717d9d-3b42-4b81-a25d-84d3b3170d55-logs\") pod \"glance-default-external-api-0\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " pod="openstack/glance-default-external-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.981622 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d1206021-ebee-4c1a-a195-a0ca8c95b324-logs\") pod \"glance-default-internal-api-0\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.981675 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c6717d9d-3b42-4b81-a25d-84d3b3170d55-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " pod="openstack/glance-default-external-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.981704 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " 
pod="openstack/glance-default-external-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.982183 4632 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-external-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.983199 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6717d9d-3b42-4b81-a25d-84d3b3170d55-logs\") pod \"glance-default-external-api-0\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " pod="openstack/glance-default-external-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.983461 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c6717d9d-3b42-4b81-a25d-84d3b3170d55-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " pod="openstack/glance-default-external-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.983990 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6717d9d-3b42-4b81-a25d-84d3b3170d55-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " pod="openstack/glance-default-external-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.990070 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6717d9d-3b42-4b81-a25d-84d3b3170d55-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " pod="openstack/glance-default-external-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.991642 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6717d9d-3b42-4b81-a25d-84d3b3170d55-scripts\") pod \"glance-default-external-api-0\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " pod="openstack/glance-default-external-api-0" Dec 01 06:57:53 crc kubenswrapper[4632]: I1201 06:57:53.992903 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6717d9d-3b42-4b81-a25d-84d3b3170d55-config-data\") pod \"glance-default-external-api-0\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " pod="openstack/glance-default-external-api-0" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.000983 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4w9lr\" (UniqueName: \"kubernetes.io/projected/c6717d9d-3b42-4b81-a25d-84d3b3170d55-kube-api-access-4w9lr\") pod \"glance-default-external-api-0\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " pod="openstack/glance-default-external-api-0" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.006744 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " pod="openstack/glance-default-external-api-0" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.083199 4632 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1206021-ebee-4c1a-a195-a0ca8c95b324-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.083248 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1206021-ebee-4c1a-a195-a0ca8c95b324-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.083271 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d1206021-ebee-4c1a-a195-a0ca8c95b324-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.083324 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1206021-ebee-4c1a-a195-a0ca8c95b324-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.083343 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d1206021-ebee-4c1a-a195-a0ca8c95b324-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.083382 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czgm8\" (UniqueName: \"kubernetes.io/projected/d1206021-ebee-4c1a-a195-a0ca8c95b324-kube-api-access-czgm8\") pod \"glance-default-internal-api-0\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.083444 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.083507 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d1206021-ebee-4c1a-a195-a0ca8c95b324-logs\") pod \"glance-default-internal-api-0\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.083955 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d1206021-ebee-4c1a-a195-a0ca8c95b324-logs\") pod \"glance-default-internal-api-0\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.084652 4632 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-internal-api-0" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.084778 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d1206021-ebee-4c1a-a195-a0ca8c95b324-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.088252 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1206021-ebee-4c1a-a195-a0ca8c95b324-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.097931 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czgm8\" (UniqueName: \"kubernetes.io/projected/d1206021-ebee-4c1a-a195-a0ca8c95b324-kube-api-access-czgm8\") pod \"glance-default-internal-api-0\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.098018 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1206021-ebee-4c1a-a195-a0ca8c95b324-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.098629 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1206021-ebee-4c1a-a195-a0ca8c95b324-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.098641 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d1206021-ebee-4c1a-a195-a0ca8c95b324-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.107288 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.118379 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.172578 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.482642 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bb95744f5-z46xh"] Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.484338 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.508371 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb95744f5-z46xh"] Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.600904 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb95744f5-z46xh\" (UID: \"77dc536d-ddc2-4490-909c-2f59f184d3ca\") " pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.600966 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cchmz\" (UniqueName: \"kubernetes.io/projected/77dc536d-ddc2-4490-909c-2f59f184d3ca-kube-api-access-cchmz\") pod \"dnsmasq-dns-6bb95744f5-z46xh\" (UID: \"77dc536d-ddc2-4490-909c-2f59f184d3ca\") " pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.601080 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb95744f5-z46xh\" (UID: \"77dc536d-ddc2-4490-909c-2f59f184d3ca\") " pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.601100 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-config\") pod \"dnsmasq-dns-6bb95744f5-z46xh\" (UID: \"77dc536d-ddc2-4490-909c-2f59f184d3ca\") " pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.601143 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-dns-svc\") pod \"dnsmasq-dns-6bb95744f5-z46xh\" (UID: \"77dc536d-ddc2-4490-909c-2f59f184d3ca\") " pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.601182 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb95744f5-z46xh\" (UID: \"77dc536d-ddc2-4490-909c-2f59f184d3ca\") " pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.663799 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-787fbf84bd-fhwb4"] Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.665247 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-787fbf84bd-fhwb4" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.672169 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-fbrw7" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.672482 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.672620 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.672713 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.674323 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-787fbf84bd-fhwb4"] Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.689563 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-nbn5v" event={"ID":"64108b12-b957-4e10-be96-e49cab11acdc","Type":"ContainerStarted","Data":"44a1d0b62e06750c4258ee13d7ff0b063a11a7ed141608992782e1f8a77ba7fa"} Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.689617 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-nbn5v" event={"ID":"64108b12-b957-4e10-be96-e49cab11acdc","Type":"ContainerStarted","Data":"2154c3b83a74d22ab773abf5e7c9675cbe61c6189638b4ad4d1377f16edc834b"} Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.704183 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb95744f5-z46xh\" (UID: \"77dc536d-ddc2-4490-909c-2f59f184d3ca\") " pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.704272 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cchmz\" (UniqueName: \"kubernetes.io/projected/77dc536d-ddc2-4490-909c-2f59f184d3ca-kube-api-access-cchmz\") pod \"dnsmasq-dns-6bb95744f5-z46xh\" (UID: \"77dc536d-ddc2-4490-909c-2f59f184d3ca\") " pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.704590 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb95744f5-z46xh\" (UID: \"77dc536d-ddc2-4490-909c-2f59f184d3ca\") " pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.704617 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-config\") pod \"dnsmasq-dns-6bb95744f5-z46xh\" (UID: \"77dc536d-ddc2-4490-909c-2f59f184d3ca\") " pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.704656 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-dns-svc\") pod \"dnsmasq-dns-6bb95744f5-z46xh\" (UID: \"77dc536d-ddc2-4490-909c-2f59f184d3ca\") " pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.704730 4632 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb95744f5-z46xh\" (UID: \"77dc536d-ddc2-4490-909c-2f59f184d3ca\") " pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.705706 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb95744f5-z46xh\" (UID: \"77dc536d-ddc2-4490-909c-2f59f184d3ca\") " pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.711060 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb95744f5-z46xh\" (UID: \"77dc536d-ddc2-4490-909c-2f59f184d3ca\") " pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.711826 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-config\") pod \"dnsmasq-dns-6bb95744f5-z46xh\" (UID: \"77dc536d-ddc2-4490-909c-2f59f184d3ca\") " pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.712529 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-dns-svc\") pod \"dnsmasq-dns-6bb95744f5-z46xh\" (UID: \"77dc536d-ddc2-4490-909c-2f59f184d3ca\") " pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.717605 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-nbn5v" podStartSLOduration=5.717584918 podStartE2EDuration="5.717584918s" podCreationTimestamp="2025-12-01 06:57:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:57:54.712008448 +0000 UTC m=+884.277021421" watchObservedRunningTime="2025-12-01 06:57:54.717584918 +0000 UTC m=+884.282597891" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.719780 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb95744f5-z46xh\" (UID: \"77dc536d-ddc2-4490-909c-2f59f184d3ca\") " pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.728199 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cchmz\" (UniqueName: \"kubernetes.io/projected/77dc536d-ddc2-4490-909c-2f59f184d3ca-kube-api-access-cchmz\") pod \"dnsmasq-dns-6bb95744f5-z46xh\" (UID: \"77dc536d-ddc2-4490-909c-2f59f184d3ca\") " pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.796285 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f8691a1-4440-42f2-a472-ed03ac4f61b6" path="/var/lib/kubelet/pods/7f8691a1-4440-42f2-a472-ed03ac4f61b6/volumes" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.797618 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="9f344c97-e869-4a69-b084-bd94330e7e79" path="/var/lib/kubelet/pods/9f344c97-e869-4a69-b084-bd94330e7e79/volumes" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.817837 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd873704-df6d-4418-ba91-4ecf29de059f" path="/var/lib/kubelet/pods/bd873704-df6d-4418-ba91-4ecf29de059f/volumes" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.818693 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.825594 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/979b2304-e919-4eba-8d31-392560a3960a-config\") pod \"neutron-787fbf84bd-fhwb4\" (UID: \"979b2304-e919-4eba-8d31-392560a3960a\") " pod="openstack/neutron-787fbf84bd-fhwb4" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.825687 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/979b2304-e919-4eba-8d31-392560a3960a-ovndb-tls-certs\") pod \"neutron-787fbf84bd-fhwb4\" (UID: \"979b2304-e919-4eba-8d31-392560a3960a\") " pod="openstack/neutron-787fbf84bd-fhwb4" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.825729 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/979b2304-e919-4eba-8d31-392560a3960a-combined-ca-bundle\") pod \"neutron-787fbf84bd-fhwb4\" (UID: \"979b2304-e919-4eba-8d31-392560a3960a\") " pod="openstack/neutron-787fbf84bd-fhwb4" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.825876 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/979b2304-e919-4eba-8d31-392560a3960a-httpd-config\") pod \"neutron-787fbf84bd-fhwb4\" (UID: \"979b2304-e919-4eba-8d31-392560a3960a\") " pod="openstack/neutron-787fbf84bd-fhwb4" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.825954 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmdb4\" (UniqueName: \"kubernetes.io/projected/979b2304-e919-4eba-8d31-392560a3960a-kube-api-access-mmdb4\") pod \"neutron-787fbf84bd-fhwb4\" (UID: \"979b2304-e919-4eba-8d31-392560a3960a\") " pod="openstack/neutron-787fbf84bd-fhwb4" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.840294 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.848335 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.927218 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/979b2304-e919-4eba-8d31-392560a3960a-config\") pod \"neutron-787fbf84bd-fhwb4\" (UID: \"979b2304-e919-4eba-8d31-392560a3960a\") " pod="openstack/neutron-787fbf84bd-fhwb4" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.927281 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/979b2304-e919-4eba-8d31-392560a3960a-ovndb-tls-certs\") pod \"neutron-787fbf84bd-fhwb4\" (UID: \"979b2304-e919-4eba-8d31-392560a3960a\") " pod="openstack/neutron-787fbf84bd-fhwb4" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.927309 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/979b2304-e919-4eba-8d31-392560a3960a-combined-ca-bundle\") pod \"neutron-787fbf84bd-fhwb4\" (UID: \"979b2304-e919-4eba-8d31-392560a3960a\") " pod="openstack/neutron-787fbf84bd-fhwb4" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.927474 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/979b2304-e919-4eba-8d31-392560a3960a-httpd-config\") pod \"neutron-787fbf84bd-fhwb4\" (UID: \"979b2304-e919-4eba-8d31-392560a3960a\") " pod="openstack/neutron-787fbf84bd-fhwb4" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.927532 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmdb4\" (UniqueName: \"kubernetes.io/projected/979b2304-e919-4eba-8d31-392560a3960a-kube-api-access-mmdb4\") pod \"neutron-787fbf84bd-fhwb4\" (UID: \"979b2304-e919-4eba-8d31-392560a3960a\") " pod="openstack/neutron-787fbf84bd-fhwb4" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.933913 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/979b2304-e919-4eba-8d31-392560a3960a-httpd-config\") pod \"neutron-787fbf84bd-fhwb4\" (UID: \"979b2304-e919-4eba-8d31-392560a3960a\") " pod="openstack/neutron-787fbf84bd-fhwb4" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.941065 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/979b2304-e919-4eba-8d31-392560a3960a-config\") pod \"neutron-787fbf84bd-fhwb4\" (UID: \"979b2304-e919-4eba-8d31-392560a3960a\") " pod="openstack/neutron-787fbf84bd-fhwb4" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.942679 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/979b2304-e919-4eba-8d31-392560a3960a-ovndb-tls-certs\") pod \"neutron-787fbf84bd-fhwb4\" (UID: \"979b2304-e919-4eba-8d31-392560a3960a\") " pod="openstack/neutron-787fbf84bd-fhwb4" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.943912 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/979b2304-e919-4eba-8d31-392560a3960a-combined-ca-bundle\") pod \"neutron-787fbf84bd-fhwb4\" (UID: 
\"979b2304-e919-4eba-8d31-392560a3960a\") " pod="openstack/neutron-787fbf84bd-fhwb4" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.956024 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmdb4\" (UniqueName: \"kubernetes.io/projected/979b2304-e919-4eba-8d31-392560a3960a-kube-api-access-mmdb4\") pod \"neutron-787fbf84bd-fhwb4\" (UID: \"979b2304-e919-4eba-8d31-392560a3960a\") " pod="openstack/neutron-787fbf84bd-fhwb4" Dec 01 06:57:54 crc kubenswrapper[4632]: I1201 06:57:54.995820 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-787fbf84bd-fhwb4" Dec 01 06:57:55 crc kubenswrapper[4632]: I1201 06:57:55.408919 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb95744f5-z46xh"] Dec 01 06:57:55 crc kubenswrapper[4632]: I1201 06:57:55.726309 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c6717d9d-3b42-4b81-a25d-84d3b3170d55","Type":"ContainerStarted","Data":"dcb414ea911d5d042dec3aaf925d0c26f7813694cdb064768910c2df3910c5c6"} Dec 01 06:57:55 crc kubenswrapper[4632]: I1201 06:57:55.726805 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c6717d9d-3b42-4b81-a25d-84d3b3170d55","Type":"ContainerStarted","Data":"57b5ed92ac9d2c09906de429fcc9b8060558073959e559501eb7399a716c5e36"} Dec 01 06:57:55 crc kubenswrapper[4632]: I1201 06:57:55.732858 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d1206021-ebee-4c1a-a195-a0ca8c95b324","Type":"ContainerStarted","Data":"1947181e6684674fd6e975c6f6b88a0ba9ba484a7872c2263cf7152e6f4d8fca"} Dec 01 06:57:55 crc kubenswrapper[4632]: I1201 06:57:55.743518 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" event={"ID":"77dc536d-ddc2-4490-909c-2f59f184d3ca","Type":"ContainerStarted","Data":"c5f67f04ed454c17782997e274ad6afb6c13de5f204f6fdf4269bf6bb91b66cf"} Dec 01 06:57:55 crc kubenswrapper[4632]: I1201 06:57:55.745277 4632 generic.go:334] "Generic (PLEG): container finished" podID="17a28608-4b5e-435b-868f-0c9cc98f7c91" containerID="9a1fbceb62690412f3971e3668768c4c4764e31aed7fc2504b7611591da48db9" exitCode=0 Dec 01 06:57:55 crc kubenswrapper[4632]: I1201 06:57:55.745307 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-bqchp" event={"ID":"17a28608-4b5e-435b-868f-0c9cc98f7c91","Type":"ContainerDied","Data":"9a1fbceb62690412f3971e3668768c4c4764e31aed7fc2504b7611591da48db9"} Dec 01 06:57:55 crc kubenswrapper[4632]: I1201 06:57:55.881283 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-787fbf84bd-fhwb4"] Dec 01 06:57:56 crc kubenswrapper[4632]: I1201 06:57:56.761109 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08137779-8c6e-49ad-b911-7d4a952d5391","Type":"ContainerStarted","Data":"b35e71f467bda0beae887614f9423ca39b1716c655e26ef6d5d8ea6c5ad9278e"} Dec 01 06:57:56 crc kubenswrapper[4632]: I1201 06:57:56.764099 4632 generic.go:334] "Generic (PLEG): container finished" podID="77dc536d-ddc2-4490-909c-2f59f184d3ca" containerID="63a5bf2cd88e2b9182fdbe37ac57b50854c7d81d6091078a407e5fccbf573ea9" exitCode=0 Dec 01 06:57:56 crc kubenswrapper[4632]: I1201 06:57:56.764301 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" 
event={"ID":"77dc536d-ddc2-4490-909c-2f59f184d3ca","Type":"ContainerDied","Data":"63a5bf2cd88e2b9182fdbe37ac57b50854c7d81d6091078a407e5fccbf573ea9"} Dec 01 06:57:56 crc kubenswrapper[4632]: I1201 06:57:56.769707 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-787fbf84bd-fhwb4" event={"ID":"979b2304-e919-4eba-8d31-392560a3960a","Type":"ContainerStarted","Data":"ae197b02fc7549c143e60ac010845904bad62578b7b8ecbf7fcdbbd81ff1207e"} Dec 01 06:57:56 crc kubenswrapper[4632]: I1201 06:57:56.769761 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-787fbf84bd-fhwb4" event={"ID":"979b2304-e919-4eba-8d31-392560a3960a","Type":"ContainerStarted","Data":"d0cc1061f53bf12326249897324c29a70a53d4386a7ef4c02658a787fbd13f69"} Dec 01 06:57:56 crc kubenswrapper[4632]: I1201 06:57:56.769772 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-787fbf84bd-fhwb4" event={"ID":"979b2304-e919-4eba-8d31-392560a3960a","Type":"ContainerStarted","Data":"f2af2f4b7ffdcb58796cfeb92df91b8aa3553d61892a75e3a4e572cff4984686"} Dec 01 06:57:56 crc kubenswrapper[4632]: I1201 06:57:56.770810 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-787fbf84bd-fhwb4" Dec 01 06:57:56 crc kubenswrapper[4632]: I1201 06:57:56.772475 4632 generic.go:334] "Generic (PLEG): container finished" podID="803c9fb6-6650-4865-b7f1-ed485299302f" containerID="c7ac4a849f34520e3c55784649c1f2efb713a2dd22885c3fa6ab81afe2f36b63" exitCode=0 Dec 01 06:57:56 crc kubenswrapper[4632]: I1201 06:57:56.772528 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-xvgfc" event={"ID":"803c9fb6-6650-4865-b7f1-ed485299302f","Type":"ContainerDied","Data":"c7ac4a849f34520e3c55784649c1f2efb713a2dd22885c3fa6ab81afe2f36b63"} Dec 01 06:57:56 crc kubenswrapper[4632]: I1201 06:57:56.774707 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d1206021-ebee-4c1a-a195-a0ca8c95b324","Type":"ContainerStarted","Data":"c91dc82b748ed57a47fdbcefeffcc323609eb54388ac200cd40b8207c5544564"} Dec 01 06:57:56 crc kubenswrapper[4632]: I1201 06:57:56.774732 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d1206021-ebee-4c1a-a195-a0ca8c95b324","Type":"ContainerStarted","Data":"fa16f77563add50962a4ef17b14ff84b53af168d5477e35bf67677aeeb1f0a5b"} Dec 01 06:57:56 crc kubenswrapper[4632]: I1201 06:57:56.839400 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.8393802 podStartE2EDuration="3.8393802s" podCreationTimestamp="2025-12-01 06:57:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:57:56.836616486 +0000 UTC m=+886.401629469" watchObservedRunningTime="2025-12-01 06:57:56.8393802 +0000 UTC m=+886.404393173" Dec 01 06:57:56 crc kubenswrapper[4632]: I1201 06:57:56.882797 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-787fbf84bd-fhwb4" podStartSLOduration=2.882778871 podStartE2EDuration="2.882778871s" podCreationTimestamp="2025-12-01 06:57:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:57:56.87613154 +0000 UTC m=+886.441144513" watchObservedRunningTime="2025-12-01 06:57:56.882778871 +0000 
UTC m=+886.447791844" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.136757 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-594d88dfbf-66tbw"] Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.138611 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-594d88dfbf-66tbw" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.140991 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.141177 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.162494 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-594d88dfbf-66tbw"] Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.209567 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd0f52ca-bba5-4410-9473-ac86c9839cf6-combined-ca-bundle\") pod \"neutron-594d88dfbf-66tbw\" (UID: \"dd0f52ca-bba5-4410-9473-ac86c9839cf6\") " pod="openstack/neutron-594d88dfbf-66tbw" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.209695 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd0f52ca-bba5-4410-9473-ac86c9839cf6-config\") pod \"neutron-594d88dfbf-66tbw\" (UID: \"dd0f52ca-bba5-4410-9473-ac86c9839cf6\") " pod="openstack/neutron-594d88dfbf-66tbw" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.209723 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dd0f52ca-bba5-4410-9473-ac86c9839cf6-httpd-config\") pod \"neutron-594d88dfbf-66tbw\" (UID: \"dd0f52ca-bba5-4410-9473-ac86c9839cf6\") " pod="openstack/neutron-594d88dfbf-66tbw" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.209790 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd0f52ca-bba5-4410-9473-ac86c9839cf6-public-tls-certs\") pod \"neutron-594d88dfbf-66tbw\" (UID: \"dd0f52ca-bba5-4410-9473-ac86c9839cf6\") " pod="openstack/neutron-594d88dfbf-66tbw" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.209812 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2g9wh\" (UniqueName: \"kubernetes.io/projected/dd0f52ca-bba5-4410-9473-ac86c9839cf6-kube-api-access-2g9wh\") pod \"neutron-594d88dfbf-66tbw\" (UID: \"dd0f52ca-bba5-4410-9473-ac86c9839cf6\") " pod="openstack/neutron-594d88dfbf-66tbw" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.209919 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd0f52ca-bba5-4410-9473-ac86c9839cf6-ovndb-tls-certs\") pod \"neutron-594d88dfbf-66tbw\" (UID: \"dd0f52ca-bba5-4410-9473-ac86c9839cf6\") " pod="openstack/neutron-594d88dfbf-66tbw" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.209987 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd0f52ca-bba5-4410-9473-ac86c9839cf6-internal-tls-certs\") pod 
\"neutron-594d88dfbf-66tbw\" (UID: \"dd0f52ca-bba5-4410-9473-ac86c9839cf6\") " pod="openstack/neutron-594d88dfbf-66tbw" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.284869 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-bqchp" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.345145 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd0f52ca-bba5-4410-9473-ac86c9839cf6-config\") pod \"neutron-594d88dfbf-66tbw\" (UID: \"dd0f52ca-bba5-4410-9473-ac86c9839cf6\") " pod="openstack/neutron-594d88dfbf-66tbw" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.346589 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dd0f52ca-bba5-4410-9473-ac86c9839cf6-httpd-config\") pod \"neutron-594d88dfbf-66tbw\" (UID: \"dd0f52ca-bba5-4410-9473-ac86c9839cf6\") " pod="openstack/neutron-594d88dfbf-66tbw" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.346701 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd0f52ca-bba5-4410-9473-ac86c9839cf6-public-tls-certs\") pod \"neutron-594d88dfbf-66tbw\" (UID: \"dd0f52ca-bba5-4410-9473-ac86c9839cf6\") " pod="openstack/neutron-594d88dfbf-66tbw" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.346736 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2g9wh\" (UniqueName: \"kubernetes.io/projected/dd0f52ca-bba5-4410-9473-ac86c9839cf6-kube-api-access-2g9wh\") pod \"neutron-594d88dfbf-66tbw\" (UID: \"dd0f52ca-bba5-4410-9473-ac86c9839cf6\") " pod="openstack/neutron-594d88dfbf-66tbw" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.346879 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd0f52ca-bba5-4410-9473-ac86c9839cf6-ovndb-tls-certs\") pod \"neutron-594d88dfbf-66tbw\" (UID: \"dd0f52ca-bba5-4410-9473-ac86c9839cf6\") " pod="openstack/neutron-594d88dfbf-66tbw" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.346970 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd0f52ca-bba5-4410-9473-ac86c9839cf6-internal-tls-certs\") pod \"neutron-594d88dfbf-66tbw\" (UID: \"dd0f52ca-bba5-4410-9473-ac86c9839cf6\") " pod="openstack/neutron-594d88dfbf-66tbw" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.347017 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd0f52ca-bba5-4410-9473-ac86c9839cf6-combined-ca-bundle\") pod \"neutron-594d88dfbf-66tbw\" (UID: \"dd0f52ca-bba5-4410-9473-ac86c9839cf6\") " pod="openstack/neutron-594d88dfbf-66tbw" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.369505 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/dd0f52ca-bba5-4410-9473-ac86c9839cf6-httpd-config\") pod \"neutron-594d88dfbf-66tbw\" (UID: \"dd0f52ca-bba5-4410-9473-ac86c9839cf6\") " pod="openstack/neutron-594d88dfbf-66tbw" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.369582 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/dd0f52ca-bba5-4410-9473-ac86c9839cf6-ovndb-tls-certs\") pod \"neutron-594d88dfbf-66tbw\" (UID: \"dd0f52ca-bba5-4410-9473-ac86c9839cf6\") " pod="openstack/neutron-594d88dfbf-66tbw" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.371683 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd0f52ca-bba5-4410-9473-ac86c9839cf6-internal-tls-certs\") pod \"neutron-594d88dfbf-66tbw\" (UID: \"dd0f52ca-bba5-4410-9473-ac86c9839cf6\") " pod="openstack/neutron-594d88dfbf-66tbw" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.375623 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/dd0f52ca-bba5-4410-9473-ac86c9839cf6-config\") pod \"neutron-594d88dfbf-66tbw\" (UID: \"dd0f52ca-bba5-4410-9473-ac86c9839cf6\") " pod="openstack/neutron-594d88dfbf-66tbw" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.376947 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2g9wh\" (UniqueName: \"kubernetes.io/projected/dd0f52ca-bba5-4410-9473-ac86c9839cf6-kube-api-access-2g9wh\") pod \"neutron-594d88dfbf-66tbw\" (UID: \"dd0f52ca-bba5-4410-9473-ac86c9839cf6\") " pod="openstack/neutron-594d88dfbf-66tbw" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.378489 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd0f52ca-bba5-4410-9473-ac86c9839cf6-combined-ca-bundle\") pod \"neutron-594d88dfbf-66tbw\" (UID: \"dd0f52ca-bba5-4410-9473-ac86c9839cf6\") " pod="openstack/neutron-594d88dfbf-66tbw" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.380375 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd0f52ca-bba5-4410-9473-ac86c9839cf6-public-tls-certs\") pod \"neutron-594d88dfbf-66tbw\" (UID: \"dd0f52ca-bba5-4410-9473-ac86c9839cf6\") " pod="openstack/neutron-594d88dfbf-66tbw" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.449093 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17a28608-4b5e-435b-868f-0c9cc98f7c91-combined-ca-bundle\") pod \"17a28608-4b5e-435b-868f-0c9cc98f7c91\" (UID: \"17a28608-4b5e-435b-868f-0c9cc98f7c91\") " Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.449188 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ftc9q\" (UniqueName: \"kubernetes.io/projected/17a28608-4b5e-435b-868f-0c9cc98f7c91-kube-api-access-ftc9q\") pod \"17a28608-4b5e-435b-868f-0c9cc98f7c91\" (UID: \"17a28608-4b5e-435b-868f-0c9cc98f7c91\") " Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.450797 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17a28608-4b5e-435b-868f-0c9cc98f7c91-scripts\") pod \"17a28608-4b5e-435b-868f-0c9cc98f7c91\" (UID: \"17a28608-4b5e-435b-868f-0c9cc98f7c91\") " Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.455705 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17a28608-4b5e-435b-868f-0c9cc98f7c91-config-data\") pod \"17a28608-4b5e-435b-868f-0c9cc98f7c91\" (UID: \"17a28608-4b5e-435b-868f-0c9cc98f7c91\") " Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.455893 4632 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17a28608-4b5e-435b-868f-0c9cc98f7c91-logs\") pod \"17a28608-4b5e-435b-868f-0c9cc98f7c91\" (UID: \"17a28608-4b5e-435b-868f-0c9cc98f7c91\") " Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.456801 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17a28608-4b5e-435b-868f-0c9cc98f7c91-scripts" (OuterVolumeSpecName: "scripts") pod "17a28608-4b5e-435b-868f-0c9cc98f7c91" (UID: "17a28608-4b5e-435b-868f-0c9cc98f7c91"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.456838 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/17a28608-4b5e-435b-868f-0c9cc98f7c91-logs" (OuterVolumeSpecName: "logs") pod "17a28608-4b5e-435b-868f-0c9cc98f7c91" (UID: "17a28608-4b5e-435b-868f-0c9cc98f7c91"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.457635 4632 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17a28608-4b5e-435b-868f-0c9cc98f7c91-logs\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.457722 4632 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17a28608-4b5e-435b-868f-0c9cc98f7c91-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.460555 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17a28608-4b5e-435b-868f-0c9cc98f7c91-kube-api-access-ftc9q" (OuterVolumeSpecName: "kube-api-access-ftc9q") pod "17a28608-4b5e-435b-868f-0c9cc98f7c91" (UID: "17a28608-4b5e-435b-868f-0c9cc98f7c91"). InnerVolumeSpecName "kube-api-access-ftc9q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.466930 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-594d88dfbf-66tbw" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.474834 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17a28608-4b5e-435b-868f-0c9cc98f7c91-config-data" (OuterVolumeSpecName: "config-data") pod "17a28608-4b5e-435b-868f-0c9cc98f7c91" (UID: "17a28608-4b5e-435b-868f-0c9cc98f7c91"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.476587 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17a28608-4b5e-435b-868f-0c9cc98f7c91-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "17a28608-4b5e-435b-868f-0c9cc98f7c91" (UID: "17a28608-4b5e-435b-868f-0c9cc98f7c91"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.566756 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17a28608-4b5e-435b-868f-0c9cc98f7c91-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.566797 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17a28608-4b5e-435b-868f-0c9cc98f7c91-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.566812 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ftc9q\" (UniqueName: \"kubernetes.io/projected/17a28608-4b5e-435b-868f-0c9cc98f7c91-kube-api-access-ftc9q\") on node \"crc\" DevicePath \"\"" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.636966 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-dnt67" podUID="1ce86ad4-7718-4227-a97b-2e9f2dc3bc27" containerName="registry-server" probeResult="failure" output=< Dec 01 06:57:57 crc kubenswrapper[4632]: timeout: failed to connect service ":50051" within 1s Dec 01 06:57:57 crc kubenswrapper[4632]: > Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.800628 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c6717d9d-3b42-4b81-a25d-84d3b3170d55","Type":"ContainerStarted","Data":"cfce32c5641fa74da2bec51a639cf4b543a742d56f2244e815adadb8b45a0fa8"} Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.803427 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" event={"ID":"77dc536d-ddc2-4490-909c-2f59f184d3ca","Type":"ContainerStarted","Data":"bd19fbb95d062211fecefc23d5f2a5f16bf90c49450e40d99b139aa5978b5409"} Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.803936 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.805404 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-bqchp" event={"ID":"17a28608-4b5e-435b-868f-0c9cc98f7c91","Type":"ContainerDied","Data":"7afe72922c530897cc63263efeabb66311fc5ca3ce6ae77af144a5c58afd8a9e"} Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.805460 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7afe72922c530897cc63263efeabb66311fc5ca3ce6ae77af144a5c58afd8a9e" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.805562 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-bqchp" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.839722 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.839699039 podStartE2EDuration="4.839699039s" podCreationTimestamp="2025-12-01 06:57:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:57:57.832736856 +0000 UTC m=+887.397749828" watchObservedRunningTime="2025-12-01 06:57:57.839699039 +0000 UTC m=+887.404712012" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.861237 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" podStartSLOduration=3.86121545 podStartE2EDuration="3.86121545s" podCreationTimestamp="2025-12-01 06:57:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:57:57.858282427 +0000 UTC m=+887.423295400" watchObservedRunningTime="2025-12-01 06:57:57.86121545 +0000 UTC m=+887.426228423" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.891878 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-8445898876-gptmm"] Dec 01 06:57:57 crc kubenswrapper[4632]: E1201 06:57:57.892467 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17a28608-4b5e-435b-868f-0c9cc98f7c91" containerName="placement-db-sync" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.892490 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="17a28608-4b5e-435b-868f-0c9cc98f7c91" containerName="placement-db-sync" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.892708 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="17a28608-4b5e-435b-868f-0c9cc98f7c91" containerName="placement-db-sync" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.893781 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-8445898876-gptmm" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.896307 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.896395 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.897016 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-8445898876-gptmm"] Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.900951 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.901208 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 01 06:57:57 crc kubenswrapper[4632]: I1201 06:57:57.901454 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-672zc" Dec 01 06:57:58 crc kubenswrapper[4632]: I1201 06:57:58.077393 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f2d46d4-637f-441b-8710-f1d82d8a0c11-combined-ca-bundle\") pod \"placement-8445898876-gptmm\" (UID: \"0f2d46d4-637f-441b-8710-f1d82d8a0c11\") " pod="openstack/placement-8445898876-gptmm" Dec 01 06:57:58 crc kubenswrapper[4632]: I1201 06:57:58.077675 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f2d46d4-637f-441b-8710-f1d82d8a0c11-config-data\") pod \"placement-8445898876-gptmm\" (UID: \"0f2d46d4-637f-441b-8710-f1d82d8a0c11\") " pod="openstack/placement-8445898876-gptmm" Dec 01 06:57:58 crc kubenswrapper[4632]: I1201 06:57:58.077708 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f2d46d4-637f-441b-8710-f1d82d8a0c11-public-tls-certs\") pod \"placement-8445898876-gptmm\" (UID: \"0f2d46d4-637f-441b-8710-f1d82d8a0c11\") " pod="openstack/placement-8445898876-gptmm" Dec 01 06:57:58 crc kubenswrapper[4632]: I1201 06:57:58.077759 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f2d46d4-637f-441b-8710-f1d82d8a0c11-internal-tls-certs\") pod \"placement-8445898876-gptmm\" (UID: \"0f2d46d4-637f-441b-8710-f1d82d8a0c11\") " pod="openstack/placement-8445898876-gptmm" Dec 01 06:57:58 crc kubenswrapper[4632]: I1201 06:57:58.077783 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7mm5\" (UniqueName: \"kubernetes.io/projected/0f2d46d4-637f-441b-8710-f1d82d8a0c11-kube-api-access-b7mm5\") pod \"placement-8445898876-gptmm\" (UID: \"0f2d46d4-637f-441b-8710-f1d82d8a0c11\") " pod="openstack/placement-8445898876-gptmm" Dec 01 06:57:58 crc kubenswrapper[4632]: I1201 06:57:58.077815 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f2d46d4-637f-441b-8710-f1d82d8a0c11-logs\") pod \"placement-8445898876-gptmm\" (UID: \"0f2d46d4-637f-441b-8710-f1d82d8a0c11\") " pod="openstack/placement-8445898876-gptmm" Dec 01 06:57:58 crc kubenswrapper[4632]: I1201 
06:57:58.077845 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f2d46d4-637f-441b-8710-f1d82d8a0c11-scripts\") pod \"placement-8445898876-gptmm\" (UID: \"0f2d46d4-637f-441b-8710-f1d82d8a0c11\") " pod="openstack/placement-8445898876-gptmm" Dec 01 06:57:58 crc kubenswrapper[4632]: I1201 06:57:58.112035 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-594d88dfbf-66tbw"] Dec 01 06:57:58 crc kubenswrapper[4632]: I1201 06:57:58.179408 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f2d46d4-637f-441b-8710-f1d82d8a0c11-combined-ca-bundle\") pod \"placement-8445898876-gptmm\" (UID: \"0f2d46d4-637f-441b-8710-f1d82d8a0c11\") " pod="openstack/placement-8445898876-gptmm" Dec 01 06:57:58 crc kubenswrapper[4632]: I1201 06:57:58.179463 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f2d46d4-637f-441b-8710-f1d82d8a0c11-config-data\") pod \"placement-8445898876-gptmm\" (UID: \"0f2d46d4-637f-441b-8710-f1d82d8a0c11\") " pod="openstack/placement-8445898876-gptmm" Dec 01 06:57:58 crc kubenswrapper[4632]: I1201 06:57:58.179488 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f2d46d4-637f-441b-8710-f1d82d8a0c11-public-tls-certs\") pod \"placement-8445898876-gptmm\" (UID: \"0f2d46d4-637f-441b-8710-f1d82d8a0c11\") " pod="openstack/placement-8445898876-gptmm" Dec 01 06:57:58 crc kubenswrapper[4632]: I1201 06:57:58.179527 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f2d46d4-637f-441b-8710-f1d82d8a0c11-internal-tls-certs\") pod \"placement-8445898876-gptmm\" (UID: \"0f2d46d4-637f-441b-8710-f1d82d8a0c11\") " pod="openstack/placement-8445898876-gptmm" Dec 01 06:57:58 crc kubenswrapper[4632]: I1201 06:57:58.179554 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7mm5\" (UniqueName: \"kubernetes.io/projected/0f2d46d4-637f-441b-8710-f1d82d8a0c11-kube-api-access-b7mm5\") pod \"placement-8445898876-gptmm\" (UID: \"0f2d46d4-637f-441b-8710-f1d82d8a0c11\") " pod="openstack/placement-8445898876-gptmm" Dec 01 06:57:58 crc kubenswrapper[4632]: I1201 06:57:58.179582 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f2d46d4-637f-441b-8710-f1d82d8a0c11-logs\") pod \"placement-8445898876-gptmm\" (UID: \"0f2d46d4-637f-441b-8710-f1d82d8a0c11\") " pod="openstack/placement-8445898876-gptmm" Dec 01 06:57:58 crc kubenswrapper[4632]: I1201 06:57:58.179607 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f2d46d4-637f-441b-8710-f1d82d8a0c11-scripts\") pod \"placement-8445898876-gptmm\" (UID: \"0f2d46d4-637f-441b-8710-f1d82d8a0c11\") " pod="openstack/placement-8445898876-gptmm" Dec 01 06:57:58 crc kubenswrapper[4632]: I1201 06:57:58.180909 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f2d46d4-637f-441b-8710-f1d82d8a0c11-logs\") pod \"placement-8445898876-gptmm\" (UID: \"0f2d46d4-637f-441b-8710-f1d82d8a0c11\") " pod="openstack/placement-8445898876-gptmm" Dec 01 06:57:58 crc 
Dec 01 06:57:58 crc kubenswrapper[4632]: I1201 06:57:58.186026 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f2d46d4-637f-441b-8710-f1d82d8a0c11-public-tls-certs\") pod \"placement-8445898876-gptmm\" (UID: \"0f2d46d4-637f-441b-8710-f1d82d8a0c11\") " pod="openstack/placement-8445898876-gptmm"
Dec 01 06:57:58 crc kubenswrapper[4632]: I1201 06:57:58.187493 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f2d46d4-637f-441b-8710-f1d82d8a0c11-config-data\") pod \"placement-8445898876-gptmm\" (UID: \"0f2d46d4-637f-441b-8710-f1d82d8a0c11\") " pod="openstack/placement-8445898876-gptmm"
Dec 01 06:57:58 crc kubenswrapper[4632]: I1201 06:57:58.190842 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f2d46d4-637f-441b-8710-f1d82d8a0c11-internal-tls-certs\") pod \"placement-8445898876-gptmm\" (UID: \"0f2d46d4-637f-441b-8710-f1d82d8a0c11\") " pod="openstack/placement-8445898876-gptmm"
Dec 01 06:57:58 crc kubenswrapper[4632]: I1201 06:57:58.198584 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f2d46d4-637f-441b-8710-f1d82d8a0c11-scripts\") pod \"placement-8445898876-gptmm\" (UID: \"0f2d46d4-637f-441b-8710-f1d82d8a0c11\") " pod="openstack/placement-8445898876-gptmm"
Dec 01 06:57:58 crc kubenswrapper[4632]: I1201 06:57:58.199034 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f2d46d4-637f-441b-8710-f1d82d8a0c11-combined-ca-bundle\") pod \"placement-8445898876-gptmm\" (UID: \"0f2d46d4-637f-441b-8710-f1d82d8a0c11\") " pod="openstack/placement-8445898876-gptmm"
Dec 01 06:57:58 crc kubenswrapper[4632]: I1201 06:57:58.203798 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7mm5\" (UniqueName: \"kubernetes.io/projected/0f2d46d4-637f-441b-8710-f1d82d8a0c11-kube-api-access-b7mm5\") pod \"placement-8445898876-gptmm\" (UID: \"0f2d46d4-637f-441b-8710-f1d82d8a0c11\") " pod="openstack/placement-8445898876-gptmm"
Dec 01 06:57:58 crc kubenswrapper[4632]: I1201 06:57:58.220048 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-8445898876-gptmm"
Dec 01 06:57:58 crc kubenswrapper[4632]: I1201 06:57:58.819736 4632 generic.go:334] "Generic (PLEG): container finished" podID="64108b12-b957-4e10-be96-e49cab11acdc" containerID="44a1d0b62e06750c4258ee13d7ff0b063a11a7ed141608992782e1f8a77ba7fa" exitCode=0
Dec 01 06:57:58 crc kubenswrapper[4632]: I1201 06:57:58.819921 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-nbn5v" event={"ID":"64108b12-b957-4e10-be96-e49cab11acdc","Type":"ContainerDied","Data":"44a1d0b62e06750c4258ee13d7ff0b063a11a7ed141608992782e1f8a77ba7fa"}
Dec 01 06:58:04 crc kubenswrapper[4632]: I1201 06:58:04.119159 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Dec 01 06:58:04 crc kubenswrapper[4632]: I1201 06:58:04.119553 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Dec 01 06:58:04 crc kubenswrapper[4632]: I1201 06:58:04.149602 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Dec 01 06:58:04 crc kubenswrapper[4632]: I1201 06:58:04.152132 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Dec 01 06:58:04 crc kubenswrapper[4632]: I1201 06:58:04.173399 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Dec 01 06:58:04 crc kubenswrapper[4632]: I1201 06:58:04.173428 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Dec 01 06:58:04 crc kubenswrapper[4632]: I1201 06:58:04.204514 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Dec 01 06:58:04 crc kubenswrapper[4632]: I1201 06:58:04.209526 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Dec 01 06:58:04 crc kubenswrapper[4632]: I1201 06:58:04.841546 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6bb95744f5-z46xh"
Dec 01 06:58:04 crc kubenswrapper[4632]: I1201 06:58:04.877795 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Dec 01 06:58:04 crc kubenswrapper[4632]: I1201 06:58:04.877839 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Dec 01 06:58:04 crc kubenswrapper[4632]: I1201 06:58:04.877854 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Dec 01 06:58:04 crc kubenswrapper[4632]: I1201 06:58:04.877863 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Dec 01 06:58:04 crc kubenswrapper[4632]: I1201 06:58:04.892498 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76cff69bf5-b4scl"]
Dec 01 06:58:04 crc kubenswrapper[4632]: I1201 06:58:04.892771 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-76cff69bf5-b4scl" podUID="42ac38c3-780c-44b4-9d50-aa59cdf15703" containerName="dnsmasq-dns" containerID="cri-o://3715db80686bc9c87d6ca539617efe657208af1e85ad7c30266974cef42299e8" gracePeriod=10
Dec 01 06:58:05 crc kubenswrapper[4632]: I1201 06:58:05.938176 4632 generic.go:334] "Generic (PLEG): container finished" podID="42ac38c3-780c-44b4-9d50-aa59cdf15703" containerID="3715db80686bc9c87d6ca539617efe657208af1e85ad7c30266974cef42299e8" exitCode=0
Dec 01 06:58:05 crc kubenswrapper[4632]: I1201 06:58:05.938260 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76cff69bf5-b4scl" event={"ID":"42ac38c3-780c-44b4-9d50-aa59cdf15703","Type":"ContainerDied","Data":"3715db80686bc9c87d6ca539617efe657208af1e85ad7c30266974cef42299e8"}
Dec 01 06:58:05 crc kubenswrapper[4632]: I1201 06:58:05.963665 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-594d88dfbf-66tbw" event={"ID":"dd0f52ca-bba5-4410-9473-ac86c9839cf6","Type":"ContainerStarted","Data":"6f7bd2581c654c0e259fdf7d3e14fc20dd9a18a75d50369c7d3ab2b7950553a0"}
Dec 01 06:58:05 crc kubenswrapper[4632]: I1201 06:58:05.974820 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-nbn5v"
Dec 01 06:58:05 crc kubenswrapper[4632]: I1201 06:58:05.983972 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-nbn5v" event={"ID":"64108b12-b957-4e10-be96-e49cab11acdc","Type":"ContainerDied","Data":"2154c3b83a74d22ab773abf5e7c9675cbe61c6189638b4ad4d1377f16edc834b"}
Dec 01 06:58:05 crc kubenswrapper[4632]: I1201 06:58:05.984013 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2154c3b83a74d22ab773abf5e7c9675cbe61c6189638b4ad4d1377f16edc834b"
Dec 01 06:58:06 crc kubenswrapper[4632]: I1201 06:58:06.080924 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-config-data\") pod \"64108b12-b957-4e10-be96-e49cab11acdc\" (UID: \"64108b12-b957-4e10-be96-e49cab11acdc\") "
Dec 01 06:58:06 crc kubenswrapper[4632]: I1201 06:58:06.081029 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-combined-ca-bundle\") pod \"64108b12-b957-4e10-be96-e49cab11acdc\" (UID: \"64108b12-b957-4e10-be96-e49cab11acdc\") "
Dec 01 06:58:06 crc kubenswrapper[4632]: I1201 06:58:06.081151 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-credential-keys\") pod \"64108b12-b957-4e10-be96-e49cab11acdc\" (UID: \"64108b12-b957-4e10-be96-e49cab11acdc\") "
Dec 01 06:58:06 crc kubenswrapper[4632]: I1201 06:58:06.081184 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-scripts\") pod \"64108b12-b957-4e10-be96-e49cab11acdc\" (UID: \"64108b12-b957-4e10-be96-e49cab11acdc\") "
Dec 01 06:58:06 crc kubenswrapper[4632]: I1201 06:58:06.081612 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-fernet-keys\") pod \"64108b12-b957-4e10-be96-e49cab11acdc\" (UID: \"64108b12-b957-4e10-be96-e49cab11acdc\") "
\"kubernetes.io/projected/64108b12-b957-4e10-be96-e49cab11acdc-kube-api-access-z7642\") pod \"64108b12-b957-4e10-be96-e49cab11acdc\" (UID: \"64108b12-b957-4e10-be96-e49cab11acdc\") " Dec 01 06:58:06 crc kubenswrapper[4632]: I1201 06:58:06.099427 4632 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-76cff69bf5-b4scl" podUID="42ac38c3-780c-44b4-9d50-aa59cdf15703" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.138:5353: connect: connection refused" Dec 01 06:58:06 crc kubenswrapper[4632]: I1201 06:58:06.105493 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64108b12-b957-4e10-be96-e49cab11acdc-kube-api-access-z7642" (OuterVolumeSpecName: "kube-api-access-z7642") pod "64108b12-b957-4e10-be96-e49cab11acdc" (UID: "64108b12-b957-4e10-be96-e49cab11acdc"). InnerVolumeSpecName "kube-api-access-z7642". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:58:06 crc kubenswrapper[4632]: I1201 06:58:06.106820 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-scripts" (OuterVolumeSpecName: "scripts") pod "64108b12-b957-4e10-be96-e49cab11acdc" (UID: "64108b12-b957-4e10-be96-e49cab11acdc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:06 crc kubenswrapper[4632]: I1201 06:58:06.109441 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "64108b12-b957-4e10-be96-e49cab11acdc" (UID: "64108b12-b957-4e10-be96-e49cab11acdc"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:06 crc kubenswrapper[4632]: I1201 06:58:06.113416 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "64108b12-b957-4e10-be96-e49cab11acdc" (UID: "64108b12-b957-4e10-be96-e49cab11acdc"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:06 crc kubenswrapper[4632]: I1201 06:58:06.146981 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "64108b12-b957-4e10-be96-e49cab11acdc" (UID: "64108b12-b957-4e10-be96-e49cab11acdc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:06 crc kubenswrapper[4632]: I1201 06:58:06.147554 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-config-data" (OuterVolumeSpecName: "config-data") pod "64108b12-b957-4e10-be96-e49cab11acdc" (UID: "64108b12-b957-4e10-be96-e49cab11acdc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:06 crc kubenswrapper[4632]: I1201 06:58:06.186186 4632 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:06 crc kubenswrapper[4632]: I1201 06:58:06.186226 4632 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:06 crc kubenswrapper[4632]: I1201 06:58:06.186236 4632 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:06 crc kubenswrapper[4632]: I1201 06:58:06.186246 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z7642\" (UniqueName: \"kubernetes.io/projected/64108b12-b957-4e10-be96-e49cab11acdc-kube-api-access-z7642\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:06 crc kubenswrapper[4632]: I1201 06:58:06.186258 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:06 crc kubenswrapper[4632]: I1201 06:58:06.186267 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64108b12-b957-4e10-be96-e49cab11acdc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:06 crc kubenswrapper[4632]: I1201 06:58:06.612056 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-dnt67" Dec 01 06:58:06 crc kubenswrapper[4632]: I1201 06:58:06.664983 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-dnt67" Dec 01 06:58:06 crc kubenswrapper[4632]: I1201 06:58:06.734366 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 01 06:58:06 crc kubenswrapper[4632]: I1201 06:58:06.798197 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 01 06:58:06 crc kubenswrapper[4632]: I1201 06:58:06.827836 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 01 06:58:06 crc kubenswrapper[4632]: I1201 06:58:06.848529 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 01 06:58:06 crc kubenswrapper[4632]: I1201 06:58:06.993776 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-nbn5v" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.072010 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-5d8c896fc4-b4shb"] Dec 01 06:58:07 crc kubenswrapper[4632]: E1201 06:58:07.072990 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64108b12-b957-4e10-be96-e49cab11acdc" containerName="keystone-bootstrap" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.073021 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="64108b12-b957-4e10-be96-e49cab11acdc" containerName="keystone-bootstrap" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.073312 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="64108b12-b957-4e10-be96-e49cab11acdc" containerName="keystone-bootstrap" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.073938 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.077125 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.077160 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.077473 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.077619 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-ll652" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.077812 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.077936 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.084193 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5d8c896fc4-b4shb"] Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.208887 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebd84a7e-560e-4bc0-b3e7-2f2c0843d789-combined-ca-bundle\") pod \"keystone-5d8c896fc4-b4shb\" (UID: \"ebd84a7e-560e-4bc0-b3e7-2f2c0843d789\") " pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.209014 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ebd84a7e-560e-4bc0-b3e7-2f2c0843d789-credential-keys\") pod \"keystone-5d8c896fc4-b4shb\" (UID: \"ebd84a7e-560e-4bc0-b3e7-2f2c0843d789\") " pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.209153 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ebd84a7e-560e-4bc0-b3e7-2f2c0843d789-fernet-keys\") pod \"keystone-5d8c896fc4-b4shb\" (UID: \"ebd84a7e-560e-4bc0-b3e7-2f2c0843d789\") " pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.209192 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/ebd84a7e-560e-4bc0-b3e7-2f2c0843d789-config-data\") pod \"keystone-5d8c896fc4-b4shb\" (UID: \"ebd84a7e-560e-4bc0-b3e7-2f2c0843d789\") " pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.209371 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebd84a7e-560e-4bc0-b3e7-2f2c0843d789-public-tls-certs\") pod \"keystone-5d8c896fc4-b4shb\" (UID: \"ebd84a7e-560e-4bc0-b3e7-2f2c0843d789\") " pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.209579 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebd84a7e-560e-4bc0-b3e7-2f2c0843d789-scripts\") pod \"keystone-5d8c896fc4-b4shb\" (UID: \"ebd84a7e-560e-4bc0-b3e7-2f2c0843d789\") " pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.209647 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebd84a7e-560e-4bc0-b3e7-2f2c0843d789-internal-tls-certs\") pod \"keystone-5d8c896fc4-b4shb\" (UID: \"ebd84a7e-560e-4bc0-b3e7-2f2c0843d789\") " pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.209956 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mf6v\" (UniqueName: \"kubernetes.io/projected/ebd84a7e-560e-4bc0-b3e7-2f2c0843d789-kube-api-access-6mf6v\") pod \"keystone-5d8c896fc4-b4shb\" (UID: \"ebd84a7e-560e-4bc0-b3e7-2f2c0843d789\") " pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.312484 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mf6v\" (UniqueName: \"kubernetes.io/projected/ebd84a7e-560e-4bc0-b3e7-2f2c0843d789-kube-api-access-6mf6v\") pod \"keystone-5d8c896fc4-b4shb\" (UID: \"ebd84a7e-560e-4bc0-b3e7-2f2c0843d789\") " pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.312611 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebd84a7e-560e-4bc0-b3e7-2f2c0843d789-combined-ca-bundle\") pod \"keystone-5d8c896fc4-b4shb\" (UID: \"ebd84a7e-560e-4bc0-b3e7-2f2c0843d789\") " pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.312642 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ebd84a7e-560e-4bc0-b3e7-2f2c0843d789-credential-keys\") pod \"keystone-5d8c896fc4-b4shb\" (UID: \"ebd84a7e-560e-4bc0-b3e7-2f2c0843d789\") " pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.312685 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ebd84a7e-560e-4bc0-b3e7-2f2c0843d789-fernet-keys\") pod \"keystone-5d8c896fc4-b4shb\" (UID: \"ebd84a7e-560e-4bc0-b3e7-2f2c0843d789\") " pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.312706 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/ebd84a7e-560e-4bc0-b3e7-2f2c0843d789-config-data\") pod \"keystone-5d8c896fc4-b4shb\" (UID: \"ebd84a7e-560e-4bc0-b3e7-2f2c0843d789\") " pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.312744 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebd84a7e-560e-4bc0-b3e7-2f2c0843d789-public-tls-certs\") pod \"keystone-5d8c896fc4-b4shb\" (UID: \"ebd84a7e-560e-4bc0-b3e7-2f2c0843d789\") " pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.312812 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebd84a7e-560e-4bc0-b3e7-2f2c0843d789-scripts\") pod \"keystone-5d8c896fc4-b4shb\" (UID: \"ebd84a7e-560e-4bc0-b3e7-2f2c0843d789\") " pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.312849 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebd84a7e-560e-4bc0-b3e7-2f2c0843d789-internal-tls-certs\") pod \"keystone-5d8c896fc4-b4shb\" (UID: \"ebd84a7e-560e-4bc0-b3e7-2f2c0843d789\") " pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.320293 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebd84a7e-560e-4bc0-b3e7-2f2c0843d789-public-tls-certs\") pod \"keystone-5d8c896fc4-b4shb\" (UID: \"ebd84a7e-560e-4bc0-b3e7-2f2c0843d789\") " pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.320676 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ebd84a7e-560e-4bc0-b3e7-2f2c0843d789-scripts\") pod \"keystone-5d8c896fc4-b4shb\" (UID: \"ebd84a7e-560e-4bc0-b3e7-2f2c0843d789\") " pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.320818 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ebd84a7e-560e-4bc0-b3e7-2f2c0843d789-internal-tls-certs\") pod \"keystone-5d8c896fc4-b4shb\" (UID: \"ebd84a7e-560e-4bc0-b3e7-2f2c0843d789\") " pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.324626 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ebd84a7e-560e-4bc0-b3e7-2f2c0843d789-credential-keys\") pod \"keystone-5d8c896fc4-b4shb\" (UID: \"ebd84a7e-560e-4bc0-b3e7-2f2c0843d789\") " pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.325062 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ebd84a7e-560e-4bc0-b3e7-2f2c0843d789-fernet-keys\") pod \"keystone-5d8c896fc4-b4shb\" (UID: \"ebd84a7e-560e-4bc0-b3e7-2f2c0843d789\") " pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.325186 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ebd84a7e-560e-4bc0-b3e7-2f2c0843d789-config-data\") pod \"keystone-5d8c896fc4-b4shb\" (UID: \"ebd84a7e-560e-4bc0-b3e7-2f2c0843d789\") " 
pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.327720 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mf6v\" (UniqueName: \"kubernetes.io/projected/ebd84a7e-560e-4bc0-b3e7-2f2c0843d789-kube-api-access-6mf6v\") pod \"keystone-5d8c896fc4-b4shb\" (UID: \"ebd84a7e-560e-4bc0-b3e7-2f2c0843d789\") " pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.329461 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebd84a7e-560e-4bc0-b3e7-2f2c0843d789-combined-ca-bundle\") pod \"keystone-5d8c896fc4-b4shb\" (UID: \"ebd84a7e-560e-4bc0-b3e7-2f2c0843d789\") " pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.392447 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:07 crc kubenswrapper[4632]: I1201 06:58:07.448398 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dnt67"] Dec 01 06:58:08 crc kubenswrapper[4632]: I1201 06:58:08.001974 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-dnt67" podUID="1ce86ad4-7718-4227-a97b-2e9f2dc3bc27" containerName="registry-server" containerID="cri-o://84ac35f39972efda5ca7cb8f9eff4d97fc2331c636da36253602a60ae21f60a8" gracePeriod=2 Dec 01 06:58:09 crc kubenswrapper[4632]: I1201 06:58:09.012110 4632 generic.go:334] "Generic (PLEG): container finished" podID="1ce86ad4-7718-4227-a97b-2e9f2dc3bc27" containerID="84ac35f39972efda5ca7cb8f9eff4d97fc2331c636da36253602a60ae21f60a8" exitCode=0 Dec 01 06:58:09 crc kubenswrapper[4632]: I1201 06:58:09.012212 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dnt67" event={"ID":"1ce86ad4-7718-4227-a97b-2e9f2dc3bc27","Type":"ContainerDied","Data":"84ac35f39972efda5ca7cb8f9eff4d97fc2331c636da36253602a60ae21f60a8"} Dec 01 06:58:09 crc kubenswrapper[4632]: I1201 06:58:09.861547 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-psmqh"] Dec 01 06:58:09 crc kubenswrapper[4632]: I1201 06:58:09.863882 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-psmqh" Dec 01 06:58:09 crc kubenswrapper[4632]: I1201 06:58:09.878988 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-psmqh"] Dec 01 06:58:09 crc kubenswrapper[4632]: I1201 06:58:09.963872 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04-catalog-content\") pod \"community-operators-psmqh\" (UID: \"a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04\") " pod="openshift-marketplace/community-operators-psmqh" Dec 01 06:58:09 crc kubenswrapper[4632]: I1201 06:58:09.963942 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65n6x\" (UniqueName: \"kubernetes.io/projected/a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04-kube-api-access-65n6x\") pod \"community-operators-psmqh\" (UID: \"a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04\") " pod="openshift-marketplace/community-operators-psmqh" Dec 01 06:58:09 crc kubenswrapper[4632]: I1201 06:58:09.964003 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04-utilities\") pod \"community-operators-psmqh\" (UID: \"a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04\") " pod="openshift-marketplace/community-operators-psmqh" Dec 01 06:58:10 crc kubenswrapper[4632]: I1201 06:58:10.066512 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04-catalog-content\") pod \"community-operators-psmqh\" (UID: \"a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04\") " pod="openshift-marketplace/community-operators-psmqh" Dec 01 06:58:10 crc kubenswrapper[4632]: I1201 06:58:10.066639 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65n6x\" (UniqueName: \"kubernetes.io/projected/a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04-kube-api-access-65n6x\") pod \"community-operators-psmqh\" (UID: \"a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04\") " pod="openshift-marketplace/community-operators-psmqh" Dec 01 06:58:10 crc kubenswrapper[4632]: I1201 06:58:10.066706 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04-utilities\") pod \"community-operators-psmqh\" (UID: \"a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04\") " pod="openshift-marketplace/community-operators-psmqh" Dec 01 06:58:10 crc kubenswrapper[4632]: I1201 06:58:10.067098 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04-catalog-content\") pod \"community-operators-psmqh\" (UID: \"a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04\") " pod="openshift-marketplace/community-operators-psmqh" Dec 01 06:58:10 crc kubenswrapper[4632]: I1201 06:58:10.067108 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04-utilities\") pod \"community-operators-psmqh\" (UID: \"a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04\") " pod="openshift-marketplace/community-operators-psmqh" Dec 01 06:58:10 crc kubenswrapper[4632]: I1201 06:58:10.087200 4632 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-65n6x\" (UniqueName: \"kubernetes.io/projected/a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04-kube-api-access-65n6x\") pod \"community-operators-psmqh\" (UID: \"a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04\") " pod="openshift-marketplace/community-operators-psmqh" Dec 01 06:58:10 crc kubenswrapper[4632]: I1201 06:58:10.195562 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-psmqh" Dec 01 06:58:10 crc kubenswrapper[4632]: E1201 06:58:10.592728 4632 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-cinder-api:fa2bb8efef6782c26ea7f1675eeb36dd" Dec 01 06:58:10 crc kubenswrapper[4632]: E1201 06:58:10.592904 4632 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-cinder-api:fa2bb8efef6782c26ea7f1675eeb36dd" Dec 01 06:58:10 crc kubenswrapper[4632]: E1201 06:58:10.593137 4632 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-cinder-api:fa2bb8efef6782c26ea7f1675eeb36dd,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-z6mxl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupPro
be:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-7q66w_openstack(6b176a21-27cf-4608-8787-e91a914be7cb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 06:58:10 crc kubenswrapper[4632]: E1201 06:58:10.593815 4632 log.go:32] "ImageFsInfo from image service failed" err="rpc error: code = Unknown desc = get image fs info unable to get usage for /var/lib/containers/storage/overlay-images: get disk usage for path /var/lib/containers/storage/overlay-images: lstat /var/lib/containers/storage/overlay-images/.tmp-images.json2184032822: no such file or directory" Dec 01 06:58:10 crc kubenswrapper[4632]: E1201 06:58:10.593876 4632 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get imageFs stats: missing image stats: nil" Dec 01 06:58:10 crc kubenswrapper[4632]: E1201 06:58:10.594475 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-7q66w" podUID="6b176a21-27cf-4608-8787-e91a914be7cb" Dec 01 06:58:10 crc kubenswrapper[4632]: I1201 06:58:10.817747 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-xvgfc" Dec 01 06:58:10 crc kubenswrapper[4632]: E1201 06:58:10.823810 4632 info.go:109] Failed to get network devices: open /sys/class/net/f3a6c8e73983a62/address: no such file or directory Dec 01 06:58:10 crc kubenswrapper[4632]: I1201 06:58:10.847883 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76cff69bf5-b4scl" Dec 01 06:58:10 crc kubenswrapper[4632]: I1201 06:58:10.938126 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dnt67" Dec 01 06:58:10 crc kubenswrapper[4632]: I1201 06:58:10.982329 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-config\") pod \"42ac38c3-780c-44b4-9d50-aa59cdf15703\" (UID: \"42ac38c3-780c-44b4-9d50-aa59cdf15703\") " Dec 01 06:58:10 crc kubenswrapper[4632]: I1201 06:58:10.982738 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/803c9fb6-6650-4865-b7f1-ed485299302f-db-sync-config-data\") pod \"803c9fb6-6650-4865-b7f1-ed485299302f\" (UID: \"803c9fb6-6650-4865-b7f1-ed485299302f\") " Dec 01 06:58:10 crc kubenswrapper[4632]: I1201 06:58:10.982874 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-ovsdbserver-sb\") pod \"42ac38c3-780c-44b4-9d50-aa59cdf15703\" (UID: \"42ac38c3-780c-44b4-9d50-aa59cdf15703\") " Dec 01 06:58:10 crc kubenswrapper[4632]: I1201 06:58:10.982909 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-585br\" (UniqueName: \"kubernetes.io/projected/803c9fb6-6650-4865-b7f1-ed485299302f-kube-api-access-585br\") pod \"803c9fb6-6650-4865-b7f1-ed485299302f\" (UID: \"803c9fb6-6650-4865-b7f1-ed485299302f\") " Dec 01 06:58:10 crc kubenswrapper[4632]: I1201 06:58:10.983015 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/803c9fb6-6650-4865-b7f1-ed485299302f-combined-ca-bundle\") pod \"803c9fb6-6650-4865-b7f1-ed485299302f\" (UID: \"803c9fb6-6650-4865-b7f1-ed485299302f\") " Dec 01 06:58:10 crc kubenswrapper[4632]: I1201 06:58:10.983070 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-dns-svc\") pod \"42ac38c3-780c-44b4-9d50-aa59cdf15703\" (UID: \"42ac38c3-780c-44b4-9d50-aa59cdf15703\") " Dec 01 06:58:10 crc kubenswrapper[4632]: I1201 06:58:10.983127 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zsg78\" (UniqueName: \"kubernetes.io/projected/42ac38c3-780c-44b4-9d50-aa59cdf15703-kube-api-access-zsg78\") pod \"42ac38c3-780c-44b4-9d50-aa59cdf15703\" (UID: \"42ac38c3-780c-44b4-9d50-aa59cdf15703\") " Dec 01 06:58:10 crc kubenswrapper[4632]: I1201 06:58:10.983259 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-dns-swift-storage-0\") pod \"42ac38c3-780c-44b4-9d50-aa59cdf15703\" (UID: \"42ac38c3-780c-44b4-9d50-aa59cdf15703\") " Dec 01 06:58:10 crc kubenswrapper[4632]: I1201 06:58:10.983306 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-ovsdbserver-nb\") pod \"42ac38c3-780c-44b4-9d50-aa59cdf15703\" (UID: \"42ac38c3-780c-44b4-9d50-aa59cdf15703\") " Dec 01 06:58:10 crc kubenswrapper[4632]: I1201 06:58:10.999721 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42ac38c3-780c-44b4-9d50-aa59cdf15703-kube-api-access-zsg78" (OuterVolumeSpecName: "kube-api-access-zsg78") 
pod "42ac38c3-780c-44b4-9d50-aa59cdf15703" (UID: "42ac38c3-780c-44b4-9d50-aa59cdf15703"). InnerVolumeSpecName "kube-api-access-zsg78". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.000408 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/803c9fb6-6650-4865-b7f1-ed485299302f-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "803c9fb6-6650-4865-b7f1-ed485299302f" (UID: "803c9fb6-6650-4865-b7f1-ed485299302f"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.000896 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/803c9fb6-6650-4865-b7f1-ed485299302f-kube-api-access-585br" (OuterVolumeSpecName: "kube-api-access-585br") pod "803c9fb6-6650-4865-b7f1-ed485299302f" (UID: "803c9fb6-6650-4865-b7f1-ed485299302f"). InnerVolumeSpecName "kube-api-access-585br". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.035034 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-594d88dfbf-66tbw" event={"ID":"dd0f52ca-bba5-4410-9473-ac86c9839cf6","Type":"ContainerStarted","Data":"5a6b7e699db6d541c86aa03059d1fa2bb7e64d82044a1a8a5c84b970cbef844d"} Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.038563 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-xvgfc" event={"ID":"803c9fb6-6650-4865-b7f1-ed485299302f","Type":"ContainerDied","Data":"72be5536bdc0bbb047e4488cdb60a9a9b51228318e760d073d001861c7eec2da"} Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.038631 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="72be5536bdc0bbb047e4488cdb60a9a9b51228318e760d073d001861c7eec2da" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.038732 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-xvgfc" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.041984 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-config" (OuterVolumeSpecName: "config") pod "42ac38c3-780c-44b4-9d50-aa59cdf15703" (UID: "42ac38c3-780c-44b4-9d50-aa59cdf15703"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.042054 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08137779-8c6e-49ad-b911-7d4a952d5391","Type":"ContainerStarted","Data":"aa13cd17e11e58b6ebbb5316f3539910bf44bece17270163c1259c06f120e030"} Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.050345 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/803c9fb6-6650-4865-b7f1-ed485299302f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "803c9fb6-6650-4865-b7f1-ed485299302f" (UID: "803c9fb6-6650-4865-b7f1-ed485299302f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.051065 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-76cff69bf5-b4scl" event={"ID":"42ac38c3-780c-44b4-9d50-aa59cdf15703","Type":"ContainerDied","Data":"623c7afe76b43ea1a5713c9c42979d2566c1fc6ec700a00bc7228cf8f9ea6271"} Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.051139 4632 scope.go:117] "RemoveContainer" containerID="3715db80686bc9c87d6ca539617efe657208af1e85ad7c30266974cef42299e8" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.051296 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-76cff69bf5-b4scl" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.057964 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dnt67" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.058796 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dnt67" event={"ID":"1ce86ad4-7718-4227-a97b-2e9f2dc3bc27","Type":"ContainerDied","Data":"f3a6c8e73983a621c5120688f8d207a827bfe4efeb75f8cbe1cce5abe3019b4f"} Dec 01 06:58:11 crc kubenswrapper[4632]: E1201 06:58:11.059704 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-antelope-centos9/openstack-cinder-api:fa2bb8efef6782c26ea7f1675eeb36dd\\\"\"" pod="openstack/cinder-db-sync-7q66w" podUID="6b176a21-27cf-4608-8787-e91a914be7cb" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.073459 4632 scope.go:117] "RemoveContainer" containerID="b78499eaf2d197b54c7bc1972dfd53bb41d68cc3a8c625d97a7c283da05d5ef7" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.073779 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "42ac38c3-780c-44b4-9d50-aa59cdf15703" (UID: "42ac38c3-780c-44b4-9d50-aa59cdf15703"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.075022 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "42ac38c3-780c-44b4-9d50-aa59cdf15703" (UID: "42ac38c3-780c-44b4-9d50-aa59cdf15703"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.080271 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "42ac38c3-780c-44b4-9d50-aa59cdf15703" (UID: "42ac38c3-780c-44b4-9d50-aa59cdf15703"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.087890 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gznh7\" (UniqueName: \"kubernetes.io/projected/1ce86ad4-7718-4227-a97b-2e9f2dc3bc27-kube-api-access-gznh7\") pod \"1ce86ad4-7718-4227-a97b-2e9f2dc3bc27\" (UID: \"1ce86ad4-7718-4227-a97b-2e9f2dc3bc27\") " Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.087986 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ce86ad4-7718-4227-a97b-2e9f2dc3bc27-catalog-content\") pod \"1ce86ad4-7718-4227-a97b-2e9f2dc3bc27\" (UID: \"1ce86ad4-7718-4227-a97b-2e9f2dc3bc27\") " Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.088172 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ce86ad4-7718-4227-a97b-2e9f2dc3bc27-utilities\") pod \"1ce86ad4-7718-4227-a97b-2e9f2dc3bc27\" (UID: \"1ce86ad4-7718-4227-a97b-2e9f2dc3bc27\") " Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.088671 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/803c9fb6-6650-4865-b7f1-ed485299302f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.089280 4632 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.089304 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zsg78\" (UniqueName: \"kubernetes.io/projected/42ac38c3-780c-44b4-9d50-aa59cdf15703-kube-api-access-zsg78\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.089317 4632 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.089339 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.089348 4632 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/803c9fb6-6650-4865-b7f1-ed485299302f-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.089370 4632 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.089378 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-585br\" (UniqueName: \"kubernetes.io/projected/803c9fb6-6650-4865-b7f1-ed485299302f-kube-api-access-585br\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.089898 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ce86ad4-7718-4227-a97b-2e9f2dc3bc27-utilities" (OuterVolumeSpecName: "utilities") pod 
"1ce86ad4-7718-4227-a97b-2e9f2dc3bc27" (UID: "1ce86ad4-7718-4227-a97b-2e9f2dc3bc27"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.093863 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ce86ad4-7718-4227-a97b-2e9f2dc3bc27-kube-api-access-gznh7" (OuterVolumeSpecName: "kube-api-access-gznh7") pod "1ce86ad4-7718-4227-a97b-2e9f2dc3bc27" (UID: "1ce86ad4-7718-4227-a97b-2e9f2dc3bc27"). InnerVolumeSpecName "kube-api-access-gznh7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.098892 4632 scope.go:117] "RemoveContainer" containerID="84ac35f39972efda5ca7cb8f9eff4d97fc2331c636da36253602a60ae21f60a8" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.104513 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "42ac38c3-780c-44b4-9d50-aa59cdf15703" (UID: "42ac38c3-780c-44b4-9d50-aa59cdf15703"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.121717 4632 scope.go:117] "RemoveContainer" containerID="3184f860516bb772e0587f57f1ea878a37a69bdfd92f36a71c6e79e8fe633995" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.150964 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-psmqh"] Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.156478 4632 scope.go:117] "RemoveContainer" containerID="999da89cd0ca1508b89d3ef5319f13a1cd8d61e5c62d4f0ad1debc7f1fe5f283" Dec 01 06:58:11 crc kubenswrapper[4632]: W1201 06:58:11.163775 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda21bbafa_4cfb_4dc7_8fc1_16c567a9bb04.slice/crio-fcb7596bee82aa4242c8c3435e551e99504e5de2160aa8977d5d46e86d422edf WatchSource:0}: Error finding container fcb7596bee82aa4242c8c3435e551e99504e5de2160aa8977d5d46e86d422edf: Status 404 returned error can't find the container with id fcb7596bee82aa4242c8c3435e551e99504e5de2160aa8977d5d46e86d422edf Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.192713 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ce86ad4-7718-4227-a97b-2e9f2dc3bc27-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.192748 4632 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/42ac38c3-780c-44b4-9d50-aa59cdf15703-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.192762 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gznh7\" (UniqueName: \"kubernetes.io/projected/1ce86ad4-7718-4227-a97b-2e9f2dc3bc27-kube-api-access-gznh7\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.202639 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ce86ad4-7718-4227-a97b-2e9f2dc3bc27-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1ce86ad4-7718-4227-a97b-2e9f2dc3bc27" (UID: "1ce86ad4-7718-4227-a97b-2e9f2dc3bc27"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.204106 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-8445898876-gptmm"] Dec 01 06:58:11 crc kubenswrapper[4632]: W1201 06:58:11.218830 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0f2d46d4_637f_441b_8710_f1d82d8a0c11.slice/crio-f7638a7e7b74c27b4ecc6257b0a906ec9e9cfcb4ed480f982027f306e2459851 WatchSource:0}: Error finding container f7638a7e7b74c27b4ecc6257b0a906ec9e9cfcb4ed480f982027f306e2459851: Status 404 returned error can't find the container with id f7638a7e7b74c27b4ecc6257b0a906ec9e9cfcb4ed480f982027f306e2459851 Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.294159 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ce86ad4-7718-4227-a97b-2e9f2dc3bc27-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.344527 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-5d8c896fc4-b4shb"] Dec 01 06:58:11 crc kubenswrapper[4632]: W1201 06:58:11.348205 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podebd84a7e_560e_4bc0_b3e7_2f2c0843d789.slice/crio-b782b0dae25a7a3a7817c93e940a474d544f73940023d94e89c20e3b8d3ed30e WatchSource:0}: Error finding container b782b0dae25a7a3a7817c93e940a474d544f73940023d94e89c20e3b8d3ed30e: Status 404 returned error can't find the container with id b782b0dae25a7a3a7817c93e940a474d544f73940023d94e89c20e3b8d3ed30e Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.434672 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dnt67"] Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.447255 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-dnt67"] Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.454170 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-76cff69bf5-b4scl"] Dec 01 06:58:11 crc kubenswrapper[4632]: I1201 06:58:11.459817 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-76cff69bf5-b4scl"] Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.058161 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-5cffc97f9c-677mp"] Dec 01 06:58:12 crc kubenswrapper[4632]: E1201 06:58:12.058898 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ce86ad4-7718-4227-a97b-2e9f2dc3bc27" containerName="extract-utilities" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.058922 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ce86ad4-7718-4227-a97b-2e9f2dc3bc27" containerName="extract-utilities" Dec 01 06:58:12 crc kubenswrapper[4632]: E1201 06:58:12.058934 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42ac38c3-780c-44b4-9d50-aa59cdf15703" containerName="init" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.058941 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="42ac38c3-780c-44b4-9d50-aa59cdf15703" containerName="init" Dec 01 06:58:12 crc kubenswrapper[4632]: E1201 06:58:12.058967 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42ac38c3-780c-44b4-9d50-aa59cdf15703" containerName="dnsmasq-dns" Dec 01 
06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.058974 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="42ac38c3-780c-44b4-9d50-aa59cdf15703" containerName="dnsmasq-dns" Dec 01 06:58:12 crc kubenswrapper[4632]: E1201 06:58:12.058984 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ce86ad4-7718-4227-a97b-2e9f2dc3bc27" containerName="registry-server" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.058990 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ce86ad4-7718-4227-a97b-2e9f2dc3bc27" containerName="registry-server" Dec 01 06:58:12 crc kubenswrapper[4632]: E1201 06:58:12.059001 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ce86ad4-7718-4227-a97b-2e9f2dc3bc27" containerName="extract-content" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.059006 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ce86ad4-7718-4227-a97b-2e9f2dc3bc27" containerName="extract-content" Dec 01 06:58:12 crc kubenswrapper[4632]: E1201 06:58:12.059024 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="803c9fb6-6650-4865-b7f1-ed485299302f" containerName="barbican-db-sync" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.059030 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="803c9fb6-6650-4865-b7f1-ed485299302f" containerName="barbican-db-sync" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.059217 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="42ac38c3-780c-44b4-9d50-aa59cdf15703" containerName="dnsmasq-dns" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.059240 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ce86ad4-7718-4227-a97b-2e9f2dc3bc27" containerName="registry-server" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.059251 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="803c9fb6-6650-4865-b7f1-ed485299302f" containerName="barbican-db-sync" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.060323 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-5cffc97f9c-677mp" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.065471 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-jh9ph" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.065668 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.065696 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.074834 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5cffc97f9c-677mp"] Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.076088 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5d8c896fc4-b4shb" event={"ID":"ebd84a7e-560e-4bc0-b3e7-2f2c0843d789","Type":"ContainerStarted","Data":"355370b5bf1072f4b499638bb54e0a1d2547f8214fd602d93aeca1fef6f9060b"} Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.076147 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-5d8c896fc4-b4shb" event={"ID":"ebd84a7e-560e-4bc0-b3e7-2f2c0843d789","Type":"ContainerStarted","Data":"b782b0dae25a7a3a7817c93e940a474d544f73940023d94e89c20e3b8d3ed30e"} Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.077189 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.093540 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-594d88dfbf-66tbw" event={"ID":"dd0f52ca-bba5-4410-9473-ac86c9839cf6","Type":"ContainerStarted","Data":"bc36cb0f80b2927197bc9119051df9520ebd0902eabe116509009dbd1d13e88b"} Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.094622 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-594d88dfbf-66tbw" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.096698 4632 generic.go:334] "Generic (PLEG): container finished" podID="a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04" containerID="214dbe21b234aae3c272f38142d82c6799df255201ca0a04da25a7e903b52579" exitCode=0 Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.096751 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-psmqh" event={"ID":"a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04","Type":"ContainerDied","Data":"214dbe21b234aae3c272f38142d82c6799df255201ca0a04da25a7e903b52579"} Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.096773 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-psmqh" event={"ID":"a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04","Type":"ContainerStarted","Data":"fcb7596bee82aa4242c8c3435e551e99504e5de2160aa8977d5d46e86d422edf"} Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.103553 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8445898876-gptmm" event={"ID":"0f2d46d4-637f-441b-8710-f1d82d8a0c11","Type":"ContainerStarted","Data":"d0570de5bcf6db5ad8bcbb6e8fa0950feffa2400d0286bb6f14a711e1d8ca04b"} Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.103585 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8445898876-gptmm" 
event={"ID":"0f2d46d4-637f-441b-8710-f1d82d8a0c11","Type":"ContainerStarted","Data":"6cee7af57e0e1b3a014ffb4652bdb4c3c56bb217df5eac247144c5160c37eaaa"} Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.103599 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-8445898876-gptmm" event={"ID":"0f2d46d4-637f-441b-8710-f1d82d8a0c11","Type":"ContainerStarted","Data":"f7638a7e7b74c27b4ecc6257b0a906ec9e9cfcb4ed480f982027f306e2459851"} Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.103638 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-8445898876-gptmm" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.103661 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-8445898876-gptmm" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.122124 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-5d8c896fc4-b4shb" podStartSLOduration=5.122112625 podStartE2EDuration="5.122112625s" podCreationTimestamp="2025-12-01 06:58:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:58:12.115065451 +0000 UTC m=+901.680078424" watchObservedRunningTime="2025-12-01 06:58:12.122112625 +0000 UTC m=+901.687125598" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.141703 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-56d8484fc-ddw26"] Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.147344 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56d8484fc-ddw26" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.155905 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-58b4798-zm5jf"] Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.157731 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-58b4798-zm5jf" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.159836 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56d8484fc-ddw26"] Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.173223 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.216177 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aec57a49-c244-4fad-81c2-b29649e62945-logs\") pod \"barbican-worker-5cffc97f9c-677mp\" (UID: \"aec57a49-c244-4fad-81c2-b29649e62945\") " pod="openstack/barbican-worker-5cffc97f9c-677mp" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.216229 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aec57a49-c244-4fad-81c2-b29649e62945-combined-ca-bundle\") pod \"barbican-worker-5cffc97f9c-677mp\" (UID: \"aec57a49-c244-4fad-81c2-b29649e62945\") " pod="openstack/barbican-worker-5cffc97f9c-677mp" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.216497 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aec57a49-c244-4fad-81c2-b29649e62945-config-data-custom\") pod \"barbican-worker-5cffc97f9c-677mp\" (UID: \"aec57a49-c244-4fad-81c2-b29649e62945\") " pod="openstack/barbican-worker-5cffc97f9c-677mp" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.217386 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k59rz\" (UniqueName: \"kubernetes.io/projected/aec57a49-c244-4fad-81c2-b29649e62945-kube-api-access-k59rz\") pod \"barbican-worker-5cffc97f9c-677mp\" (UID: \"aec57a49-c244-4fad-81c2-b29649e62945\") " pod="openstack/barbican-worker-5cffc97f9c-677mp" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.217428 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aec57a49-c244-4fad-81c2-b29649e62945-config-data\") pod \"barbican-worker-5cffc97f9c-677mp\" (UID: \"aec57a49-c244-4fad-81c2-b29649e62945\") " pod="openstack/barbican-worker-5cffc97f9c-677mp" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.230666 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-58b4798-zm5jf"] Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.242066 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-8445898876-gptmm" podStartSLOduration=15.242044936 podStartE2EDuration="15.242044936s" podCreationTimestamp="2025-12-01 06:57:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:58:12.203052287 +0000 UTC m=+901.768065259" watchObservedRunningTime="2025-12-01 06:58:12.242044936 +0000 UTC m=+901.807057909" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.274296 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-594d88dfbf-66tbw" podStartSLOduration=15.274273314 podStartE2EDuration="15.274273314s" podCreationTimestamp="2025-12-01 06:57:57 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:58:12.230668224 +0000 UTC m=+901.795681196" watchObservedRunningTime="2025-12-01 06:58:12.274273314 +0000 UTC m=+901.839286288" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.320923 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aec57a49-c244-4fad-81c2-b29649e62945-config-data-custom\") pod \"barbican-worker-5cffc97f9c-677mp\" (UID: \"aec57a49-c244-4fad-81c2-b29649e62945\") " pod="openstack/barbican-worker-5cffc97f9c-677mp" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.321006 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-ovsdbserver-sb\") pod \"dnsmasq-dns-56d8484fc-ddw26\" (UID: \"9acc4130-8f32-48b6-8174-2062cefef802\") " pod="openstack/dnsmasq-dns-56d8484fc-ddw26" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.321123 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdcqb\" (UniqueName: \"kubernetes.io/projected/9acc4130-8f32-48b6-8174-2062cefef802-kube-api-access-xdcqb\") pod \"dnsmasq-dns-56d8484fc-ddw26\" (UID: \"9acc4130-8f32-48b6-8174-2062cefef802\") " pod="openstack/dnsmasq-dns-56d8484fc-ddw26" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.321507 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/16fb6740-33c0-4a6d-8711-34f7520087a5-config-data-custom\") pod \"barbican-keystone-listener-58b4798-zm5jf\" (UID: \"16fb6740-33c0-4a6d-8711-34f7520087a5\") " pod="openstack/barbican-keystone-listener-58b4798-zm5jf" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.321864 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k59rz\" (UniqueName: \"kubernetes.io/projected/aec57a49-c244-4fad-81c2-b29649e62945-kube-api-access-k59rz\") pod \"barbican-worker-5cffc97f9c-677mp\" (UID: \"aec57a49-c244-4fad-81c2-b29649e62945\") " pod="openstack/barbican-worker-5cffc97f9c-677mp" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.321933 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aec57a49-c244-4fad-81c2-b29649e62945-config-data\") pod \"barbican-worker-5cffc97f9c-677mp\" (UID: \"aec57a49-c244-4fad-81c2-b29649e62945\") " pod="openstack/barbican-worker-5cffc97f9c-677mp" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.322142 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-config\") pod \"dnsmasq-dns-56d8484fc-ddw26\" (UID: \"9acc4130-8f32-48b6-8174-2062cefef802\") " pod="openstack/dnsmasq-dns-56d8484fc-ddw26" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.322188 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-ovsdbserver-nb\") pod \"dnsmasq-dns-56d8484fc-ddw26\" (UID: \"9acc4130-8f32-48b6-8174-2062cefef802\") " pod="openstack/dnsmasq-dns-56d8484fc-ddw26" Dec 01 06:58:12 crc 
kubenswrapper[4632]: I1201 06:58:12.322225 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/16fb6740-33c0-4a6d-8711-34f7520087a5-logs\") pod \"barbican-keystone-listener-58b4798-zm5jf\" (UID: \"16fb6740-33c0-4a6d-8711-34f7520087a5\") " pod="openstack/barbican-keystone-listener-58b4798-zm5jf" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.322374 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aec57a49-c244-4fad-81c2-b29649e62945-logs\") pod \"barbican-worker-5cffc97f9c-677mp\" (UID: \"aec57a49-c244-4fad-81c2-b29649e62945\") " pod="openstack/barbican-worker-5cffc97f9c-677mp" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.322402 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aec57a49-c244-4fad-81c2-b29649e62945-combined-ca-bundle\") pod \"barbican-worker-5cffc97f9c-677mp\" (UID: \"aec57a49-c244-4fad-81c2-b29649e62945\") " pod="openstack/barbican-worker-5cffc97f9c-677mp" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.322639 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmdlh\" (UniqueName: \"kubernetes.io/projected/16fb6740-33c0-4a6d-8711-34f7520087a5-kube-api-access-nmdlh\") pod \"barbican-keystone-listener-58b4798-zm5jf\" (UID: \"16fb6740-33c0-4a6d-8711-34f7520087a5\") " pod="openstack/barbican-keystone-listener-58b4798-zm5jf" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.323839 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16fb6740-33c0-4a6d-8711-34f7520087a5-combined-ca-bundle\") pod \"barbican-keystone-listener-58b4798-zm5jf\" (UID: \"16fb6740-33c0-4a6d-8711-34f7520087a5\") " pod="openstack/barbican-keystone-listener-58b4798-zm5jf" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.323961 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16fb6740-33c0-4a6d-8711-34f7520087a5-config-data\") pod \"barbican-keystone-listener-58b4798-zm5jf\" (UID: \"16fb6740-33c0-4a6d-8711-34f7520087a5\") " pod="openstack/barbican-keystone-listener-58b4798-zm5jf" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.324018 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-dns-svc\") pod \"dnsmasq-dns-56d8484fc-ddw26\" (UID: \"9acc4130-8f32-48b6-8174-2062cefef802\") " pod="openstack/dnsmasq-dns-56d8484fc-ddw26" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.324042 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-dns-swift-storage-0\") pod \"dnsmasq-dns-56d8484fc-ddw26\" (UID: \"9acc4130-8f32-48b6-8174-2062cefef802\") " pod="openstack/dnsmasq-dns-56d8484fc-ddw26" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.327548 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aec57a49-c244-4fad-81c2-b29649e62945-logs\") pod \"barbican-worker-5cffc97f9c-677mp\" 
(UID: \"aec57a49-c244-4fad-81c2-b29649e62945\") " pod="openstack/barbican-worker-5cffc97f9c-677mp" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.334996 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aec57a49-c244-4fad-81c2-b29649e62945-config-data\") pod \"barbican-worker-5cffc97f9c-677mp\" (UID: \"aec57a49-c244-4fad-81c2-b29649e62945\") " pod="openstack/barbican-worker-5cffc97f9c-677mp" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.343342 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aec57a49-c244-4fad-81c2-b29649e62945-config-data-custom\") pod \"barbican-worker-5cffc97f9c-677mp\" (UID: \"aec57a49-c244-4fad-81c2-b29649e62945\") " pod="openstack/barbican-worker-5cffc97f9c-677mp" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.345214 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k59rz\" (UniqueName: \"kubernetes.io/projected/aec57a49-c244-4fad-81c2-b29649e62945-kube-api-access-k59rz\") pod \"barbican-worker-5cffc97f9c-677mp\" (UID: \"aec57a49-c244-4fad-81c2-b29649e62945\") " pod="openstack/barbican-worker-5cffc97f9c-677mp" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.346663 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aec57a49-c244-4fad-81c2-b29649e62945-combined-ca-bundle\") pod \"barbican-worker-5cffc97f9c-677mp\" (UID: \"aec57a49-c244-4fad-81c2-b29649e62945\") " pod="openstack/barbican-worker-5cffc97f9c-677mp" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.357009 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-94fdc9c94-ghwxr"] Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.360582 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-94fdc9c94-ghwxr" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.362299 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.377122 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-94fdc9c94-ghwxr"] Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.386641 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-5cffc97f9c-677mp" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.426284 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmdlh\" (UniqueName: \"kubernetes.io/projected/16fb6740-33c0-4a6d-8711-34f7520087a5-kube-api-access-nmdlh\") pod \"barbican-keystone-listener-58b4798-zm5jf\" (UID: \"16fb6740-33c0-4a6d-8711-34f7520087a5\") " pod="openstack/barbican-keystone-listener-58b4798-zm5jf" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.426741 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16fb6740-33c0-4a6d-8711-34f7520087a5-combined-ca-bundle\") pod \"barbican-keystone-listener-58b4798-zm5jf\" (UID: \"16fb6740-33c0-4a6d-8711-34f7520087a5\") " pod="openstack/barbican-keystone-listener-58b4798-zm5jf" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.426957 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16fb6740-33c0-4a6d-8711-34f7520087a5-config-data\") pod \"barbican-keystone-listener-58b4798-zm5jf\" (UID: \"16fb6740-33c0-4a6d-8711-34f7520087a5\") " pod="openstack/barbican-keystone-listener-58b4798-zm5jf" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.427582 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-dns-svc\") pod \"dnsmasq-dns-56d8484fc-ddw26\" (UID: \"9acc4130-8f32-48b6-8174-2062cefef802\") " pod="openstack/dnsmasq-dns-56d8484fc-ddw26" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.427616 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-dns-swift-storage-0\") pod \"dnsmasq-dns-56d8484fc-ddw26\" (UID: \"9acc4130-8f32-48b6-8174-2062cefef802\") " pod="openstack/dnsmasq-dns-56d8484fc-ddw26" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.427681 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-ovsdbserver-sb\") pod \"dnsmasq-dns-56d8484fc-ddw26\" (UID: \"9acc4130-8f32-48b6-8174-2062cefef802\") " pod="openstack/dnsmasq-dns-56d8484fc-ddw26" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.427731 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdcqb\" (UniqueName: \"kubernetes.io/projected/9acc4130-8f32-48b6-8174-2062cefef802-kube-api-access-xdcqb\") pod \"dnsmasq-dns-56d8484fc-ddw26\" (UID: \"9acc4130-8f32-48b6-8174-2062cefef802\") " pod="openstack/dnsmasq-dns-56d8484fc-ddw26" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.427752 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/16fb6740-33c0-4a6d-8711-34f7520087a5-config-data-custom\") pod \"barbican-keystone-listener-58b4798-zm5jf\" (UID: \"16fb6740-33c0-4a6d-8711-34f7520087a5\") " pod="openstack/barbican-keystone-listener-58b4798-zm5jf" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.429010 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-dns-swift-storage-0\") pod \"dnsmasq-dns-56d8484fc-ddw26\" (UID: \"9acc4130-8f32-48b6-8174-2062cefef802\") " pod="openstack/dnsmasq-dns-56d8484fc-ddw26" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.429190 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-config\") pod \"dnsmasq-dns-56d8484fc-ddw26\" (UID: \"9acc4130-8f32-48b6-8174-2062cefef802\") " pod="openstack/dnsmasq-dns-56d8484fc-ddw26" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.429218 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-ovsdbserver-nb\") pod \"dnsmasq-dns-56d8484fc-ddw26\" (UID: \"9acc4130-8f32-48b6-8174-2062cefef802\") " pod="openstack/dnsmasq-dns-56d8484fc-ddw26" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.429245 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/16fb6740-33c0-4a6d-8711-34f7520087a5-logs\") pod \"barbican-keystone-listener-58b4798-zm5jf\" (UID: \"16fb6740-33c0-4a6d-8711-34f7520087a5\") " pod="openstack/barbican-keystone-listener-58b4798-zm5jf" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.429597 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-ovsdbserver-sb\") pod \"dnsmasq-dns-56d8484fc-ddw26\" (UID: \"9acc4130-8f32-48b6-8174-2062cefef802\") " pod="openstack/dnsmasq-dns-56d8484fc-ddw26" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.429692 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/16fb6740-33c0-4a6d-8711-34f7520087a5-logs\") pod \"barbican-keystone-listener-58b4798-zm5jf\" (UID: \"16fb6740-33c0-4a6d-8711-34f7520087a5\") " pod="openstack/barbican-keystone-listener-58b4798-zm5jf" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.430206 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-config\") pod \"dnsmasq-dns-56d8484fc-ddw26\" (UID: \"9acc4130-8f32-48b6-8174-2062cefef802\") " pod="openstack/dnsmasq-dns-56d8484fc-ddw26" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.430287 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-ovsdbserver-nb\") pod \"dnsmasq-dns-56d8484fc-ddw26\" (UID: \"9acc4130-8f32-48b6-8174-2062cefef802\") " pod="openstack/dnsmasq-dns-56d8484fc-ddw26" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.431051 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-dns-svc\") pod \"dnsmasq-dns-56d8484fc-ddw26\" (UID: \"9acc4130-8f32-48b6-8174-2062cefef802\") " pod="openstack/dnsmasq-dns-56d8484fc-ddw26" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.433183 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16fb6740-33c0-4a6d-8711-34f7520087a5-combined-ca-bundle\") pod \"barbican-keystone-listener-58b4798-zm5jf\" (UID: 
\"16fb6740-33c0-4a6d-8711-34f7520087a5\") " pod="openstack/barbican-keystone-listener-58b4798-zm5jf" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.433441 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16fb6740-33c0-4a6d-8711-34f7520087a5-config-data\") pod \"barbican-keystone-listener-58b4798-zm5jf\" (UID: \"16fb6740-33c0-4a6d-8711-34f7520087a5\") " pod="openstack/barbican-keystone-listener-58b4798-zm5jf" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.433962 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/16fb6740-33c0-4a6d-8711-34f7520087a5-config-data-custom\") pod \"barbican-keystone-listener-58b4798-zm5jf\" (UID: \"16fb6740-33c0-4a6d-8711-34f7520087a5\") " pod="openstack/barbican-keystone-listener-58b4798-zm5jf" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.441068 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmdlh\" (UniqueName: \"kubernetes.io/projected/16fb6740-33c0-4a6d-8711-34f7520087a5-kube-api-access-nmdlh\") pod \"barbican-keystone-listener-58b4798-zm5jf\" (UID: \"16fb6740-33c0-4a6d-8711-34f7520087a5\") " pod="openstack/barbican-keystone-listener-58b4798-zm5jf" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.445444 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdcqb\" (UniqueName: \"kubernetes.io/projected/9acc4130-8f32-48b6-8174-2062cefef802-kube-api-access-xdcqb\") pod \"dnsmasq-dns-56d8484fc-ddw26\" (UID: \"9acc4130-8f32-48b6-8174-2062cefef802\") " pod="openstack/dnsmasq-dns-56d8484fc-ddw26" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.505297 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56d8484fc-ddw26" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.530578 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d294m\" (UniqueName: \"kubernetes.io/projected/64b038c3-a697-4612-a2a1-1701578beb7f-kube-api-access-d294m\") pod \"barbican-api-94fdc9c94-ghwxr\" (UID: \"64b038c3-a697-4612-a2a1-1701578beb7f\") " pod="openstack/barbican-api-94fdc9c94-ghwxr" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.530639 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64b038c3-a697-4612-a2a1-1701578beb7f-combined-ca-bundle\") pod \"barbican-api-94fdc9c94-ghwxr\" (UID: \"64b038c3-a697-4612-a2a1-1701578beb7f\") " pod="openstack/barbican-api-94fdc9c94-ghwxr" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.530674 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64b038c3-a697-4612-a2a1-1701578beb7f-logs\") pod \"barbican-api-94fdc9c94-ghwxr\" (UID: \"64b038c3-a697-4612-a2a1-1701578beb7f\") " pod="openstack/barbican-api-94fdc9c94-ghwxr" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.530763 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64b038c3-a697-4612-a2a1-1701578beb7f-config-data\") pod \"barbican-api-94fdc9c94-ghwxr\" (UID: \"64b038c3-a697-4612-a2a1-1701578beb7f\") " pod="openstack/barbican-api-94fdc9c94-ghwxr" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.531108 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/64b038c3-a697-4612-a2a1-1701578beb7f-config-data-custom\") pod \"barbican-api-94fdc9c94-ghwxr\" (UID: \"64b038c3-a697-4612-a2a1-1701578beb7f\") " pod="openstack/barbican-api-94fdc9c94-ghwxr" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.552127 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-58b4798-zm5jf" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.633314 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/64b038c3-a697-4612-a2a1-1701578beb7f-config-data-custom\") pod \"barbican-api-94fdc9c94-ghwxr\" (UID: \"64b038c3-a697-4612-a2a1-1701578beb7f\") " pod="openstack/barbican-api-94fdc9c94-ghwxr" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.633642 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d294m\" (UniqueName: \"kubernetes.io/projected/64b038c3-a697-4612-a2a1-1701578beb7f-kube-api-access-d294m\") pod \"barbican-api-94fdc9c94-ghwxr\" (UID: \"64b038c3-a697-4612-a2a1-1701578beb7f\") " pod="openstack/barbican-api-94fdc9c94-ghwxr" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.633747 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64b038c3-a697-4612-a2a1-1701578beb7f-combined-ca-bundle\") pod \"barbican-api-94fdc9c94-ghwxr\" (UID: \"64b038c3-a697-4612-a2a1-1701578beb7f\") " pod="openstack/barbican-api-94fdc9c94-ghwxr" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.633787 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64b038c3-a697-4612-a2a1-1701578beb7f-logs\") pod \"barbican-api-94fdc9c94-ghwxr\" (UID: \"64b038c3-a697-4612-a2a1-1701578beb7f\") " pod="openstack/barbican-api-94fdc9c94-ghwxr" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.633896 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64b038c3-a697-4612-a2a1-1701578beb7f-config-data\") pod \"barbican-api-94fdc9c94-ghwxr\" (UID: \"64b038c3-a697-4612-a2a1-1701578beb7f\") " pod="openstack/barbican-api-94fdc9c94-ghwxr" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.636538 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64b038c3-a697-4612-a2a1-1701578beb7f-logs\") pod \"barbican-api-94fdc9c94-ghwxr\" (UID: \"64b038c3-a697-4612-a2a1-1701578beb7f\") " pod="openstack/barbican-api-94fdc9c94-ghwxr" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.640135 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/64b038c3-a697-4612-a2a1-1701578beb7f-config-data-custom\") pod \"barbican-api-94fdc9c94-ghwxr\" (UID: \"64b038c3-a697-4612-a2a1-1701578beb7f\") " pod="openstack/barbican-api-94fdc9c94-ghwxr" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.642921 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64b038c3-a697-4612-a2a1-1701578beb7f-combined-ca-bundle\") pod \"barbican-api-94fdc9c94-ghwxr\" (UID: \"64b038c3-a697-4612-a2a1-1701578beb7f\") " pod="openstack/barbican-api-94fdc9c94-ghwxr" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.644006 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64b038c3-a697-4612-a2a1-1701578beb7f-config-data\") pod \"barbican-api-94fdc9c94-ghwxr\" (UID: \"64b038c3-a697-4612-a2a1-1701578beb7f\") " pod="openstack/barbican-api-94fdc9c94-ghwxr" Dec 01 06:58:12 crc 
kubenswrapper[4632]: I1201 06:58:12.650938 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d294m\" (UniqueName: \"kubernetes.io/projected/64b038c3-a697-4612-a2a1-1701578beb7f-kube-api-access-d294m\") pod \"barbican-api-94fdc9c94-ghwxr\" (UID: \"64b038c3-a697-4612-a2a1-1701578beb7f\") " pod="openstack/barbican-api-94fdc9c94-ghwxr" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.767932 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ce86ad4-7718-4227-a97b-2e9f2dc3bc27" path="/var/lib/kubelet/pods/1ce86ad4-7718-4227-a97b-2e9f2dc3bc27/volumes" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.768748 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42ac38c3-780c-44b4-9d50-aa59cdf15703" path="/var/lib/kubelet/pods/42ac38c3-780c-44b4-9d50-aa59cdf15703/volumes" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.808907 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-94fdc9c94-ghwxr" Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.829271 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5cffc97f9c-677mp"] Dec 01 06:58:12 crc kubenswrapper[4632]: W1201 06:58:12.854342 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaec57a49_c244_4fad_81c2_b29649e62945.slice/crio-f2c63a629a1c3e3633d00839c51213c2bbb450562adbc73a81124f90e5853b39 WatchSource:0}: Error finding container f2c63a629a1c3e3633d00839c51213c2bbb450562adbc73a81124f90e5853b39: Status 404 returned error can't find the container with id f2c63a629a1c3e3633d00839c51213c2bbb450562adbc73a81124f90e5853b39 Dec 01 06:58:12 crc kubenswrapper[4632]: I1201 06:58:12.973982 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56d8484fc-ddw26"] Dec 01 06:58:12 crc kubenswrapper[4632]: W1201 06:58:12.987614 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9acc4130_8f32_48b6_8174_2062cefef802.slice/crio-3e1f93637b675ccd248669f621423eddf33d07ea762782a73eee4ee9df38aabc WatchSource:0}: Error finding container 3e1f93637b675ccd248669f621423eddf33d07ea762782a73eee4ee9df38aabc: Status 404 returned error can't find the container with id 3e1f93637b675ccd248669f621423eddf33d07ea762782a73eee4ee9df38aabc Dec 01 06:58:13 crc kubenswrapper[4632]: I1201 06:58:13.051430 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-58b4798-zm5jf"] Dec 01 06:58:13 crc kubenswrapper[4632]: W1201 06:58:13.063953 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16fb6740_33c0_4a6d_8711_34f7520087a5.slice/crio-468a0c94b6387878476e21bcafed26bebe7a8467664714e141ea6783472730d8 WatchSource:0}: Error finding container 468a0c94b6387878476e21bcafed26bebe7a8467664714e141ea6783472730d8: Status 404 returned error can't find the container with id 468a0c94b6387878476e21bcafed26bebe7a8467664714e141ea6783472730d8 Dec 01 06:58:13 crc kubenswrapper[4632]: I1201 06:58:13.150802 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5cffc97f9c-677mp" event={"ID":"aec57a49-c244-4fad-81c2-b29649e62945","Type":"ContainerStarted","Data":"f2c63a629a1c3e3633d00839c51213c2bbb450562adbc73a81124f90e5853b39"} Dec 01 06:58:13 crc kubenswrapper[4632]: I1201 
06:58:13.152620 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56d8484fc-ddw26" event={"ID":"9acc4130-8f32-48b6-8174-2062cefef802","Type":"ContainerStarted","Data":"3e1f93637b675ccd248669f621423eddf33d07ea762782a73eee4ee9df38aabc"} Dec 01 06:58:13 crc kubenswrapper[4632]: I1201 06:58:13.154856 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-psmqh" event={"ID":"a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04","Type":"ContainerStarted","Data":"d58ac23cba20c81ef7e97baf26570de8a576448f3945d4e1b984c4e084d7baec"} Dec 01 06:58:13 crc kubenswrapper[4632]: I1201 06:58:13.156910 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-58b4798-zm5jf" event={"ID":"16fb6740-33c0-4a6d-8711-34f7520087a5","Type":"ContainerStarted","Data":"468a0c94b6387878476e21bcafed26bebe7a8467664714e141ea6783472730d8"} Dec 01 06:58:13 crc kubenswrapper[4632]: I1201 06:58:13.258414 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-94fdc9c94-ghwxr"] Dec 01 06:58:13 crc kubenswrapper[4632]: W1201 06:58:13.328766 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64b038c3_a697_4612_a2a1_1701578beb7f.slice/crio-d9b1e7c54bbdeb8246b151f4428c9891b7d244a21f55a12e72cea92c2b481c75 WatchSource:0}: Error finding container d9b1e7c54bbdeb8246b151f4428c9891b7d244a21f55a12e72cea92c2b481c75: Status 404 returned error can't find the container with id d9b1e7c54bbdeb8246b151f4428c9891b7d244a21f55a12e72cea92c2b481c75 Dec 01 06:58:14 crc kubenswrapper[4632]: I1201 06:58:14.171579 4632 generic.go:334] "Generic (PLEG): container finished" podID="9acc4130-8f32-48b6-8174-2062cefef802" containerID="e5c9e1965713c2b81e566d85c34e435aaa7dbe1b7ee89afb533aac5dd51d51b2" exitCode=0 Dec 01 06:58:14 crc kubenswrapper[4632]: I1201 06:58:14.171636 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56d8484fc-ddw26" event={"ID":"9acc4130-8f32-48b6-8174-2062cefef802","Type":"ContainerDied","Data":"e5c9e1965713c2b81e566d85c34e435aaa7dbe1b7ee89afb533aac5dd51d51b2"} Dec 01 06:58:14 crc kubenswrapper[4632]: I1201 06:58:14.182150 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-94fdc9c94-ghwxr" event={"ID":"64b038c3-a697-4612-a2a1-1701578beb7f","Type":"ContainerStarted","Data":"f57a2136be7ad2ce51bf3d1ebd9cbb88ce55f3e262eccec03b5728407cbeb17e"} Dec 01 06:58:14 crc kubenswrapper[4632]: I1201 06:58:14.182196 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-94fdc9c94-ghwxr" event={"ID":"64b038c3-a697-4612-a2a1-1701578beb7f","Type":"ContainerStarted","Data":"fa9cee183059808ff7b8044e7c1e24b5c1b526b9e506082744b5514e9273d715"} Dec 01 06:58:14 crc kubenswrapper[4632]: I1201 06:58:14.182208 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-94fdc9c94-ghwxr" event={"ID":"64b038c3-a697-4612-a2a1-1701578beb7f","Type":"ContainerStarted","Data":"d9b1e7c54bbdeb8246b151f4428c9891b7d244a21f55a12e72cea92c2b481c75"} Dec 01 06:58:14 crc kubenswrapper[4632]: I1201 06:58:14.183224 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-94fdc9c94-ghwxr" Dec 01 06:58:14 crc kubenswrapper[4632]: I1201 06:58:14.183248 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-94fdc9c94-ghwxr" Dec 01 06:58:14 crc kubenswrapper[4632]: I1201 06:58:14.191676 
4632 generic.go:334] "Generic (PLEG): container finished" podID="a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04" containerID="d58ac23cba20c81ef7e97baf26570de8a576448f3945d4e1b984c4e084d7baec" exitCode=0 Dec 01 06:58:14 crc kubenswrapper[4632]: I1201 06:58:14.193132 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-psmqh" event={"ID":"a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04","Type":"ContainerDied","Data":"d58ac23cba20c81ef7e97baf26570de8a576448f3945d4e1b984c4e084d7baec"} Dec 01 06:58:14 crc kubenswrapper[4632]: I1201 06:58:14.225850 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-94fdc9c94-ghwxr" podStartSLOduration=2.225836536 podStartE2EDuration="2.225836536s" podCreationTimestamp="2025-12-01 06:58:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:58:14.224759715 +0000 UTC m=+903.789772688" watchObservedRunningTime="2025-12-01 06:58:14.225836536 +0000 UTC m=+903.790849509" Dec 01 06:58:14 crc kubenswrapper[4632]: I1201 06:58:14.836403 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5cb7c5c954-lttm2"] Dec 01 06:58:14 crc kubenswrapper[4632]: I1201 06:58:14.838381 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5cb7c5c954-lttm2" Dec 01 06:58:14 crc kubenswrapper[4632]: I1201 06:58:14.842191 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 01 06:58:14 crc kubenswrapper[4632]: I1201 06:58:14.842438 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 01 06:58:14 crc kubenswrapper[4632]: I1201 06:58:14.846058 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5cb7c5c954-lttm2"] Dec 01 06:58:15 crc kubenswrapper[4632]: I1201 06:58:14.999828 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d67e2874-71ae-4106-80fd-5361439b8ea5-logs\") pod \"barbican-api-5cb7c5c954-lttm2\" (UID: \"d67e2874-71ae-4106-80fd-5361439b8ea5\") " pod="openstack/barbican-api-5cb7c5c954-lttm2" Dec 01 06:58:15 crc kubenswrapper[4632]: I1201 06:58:15.000076 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d67e2874-71ae-4106-80fd-5361439b8ea5-public-tls-certs\") pod \"barbican-api-5cb7c5c954-lttm2\" (UID: \"d67e2874-71ae-4106-80fd-5361439b8ea5\") " pod="openstack/barbican-api-5cb7c5c954-lttm2" Dec 01 06:58:15 crc kubenswrapper[4632]: I1201 06:58:15.000123 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d67e2874-71ae-4106-80fd-5361439b8ea5-internal-tls-certs\") pod \"barbican-api-5cb7c5c954-lttm2\" (UID: \"d67e2874-71ae-4106-80fd-5361439b8ea5\") " pod="openstack/barbican-api-5cb7c5c954-lttm2" Dec 01 06:58:15 crc kubenswrapper[4632]: I1201 06:58:15.000189 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d67e2874-71ae-4106-80fd-5361439b8ea5-config-data-custom\") pod \"barbican-api-5cb7c5c954-lttm2\" (UID: \"d67e2874-71ae-4106-80fd-5361439b8ea5\") " 
pod="openstack/barbican-api-5cb7c5c954-lttm2" Dec 01 06:58:15 crc kubenswrapper[4632]: I1201 06:58:15.000330 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d67e2874-71ae-4106-80fd-5361439b8ea5-config-data\") pod \"barbican-api-5cb7c5c954-lttm2\" (UID: \"d67e2874-71ae-4106-80fd-5361439b8ea5\") " pod="openstack/barbican-api-5cb7c5c954-lttm2" Dec 01 06:58:15 crc kubenswrapper[4632]: I1201 06:58:15.000400 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-424qm\" (UniqueName: \"kubernetes.io/projected/d67e2874-71ae-4106-80fd-5361439b8ea5-kube-api-access-424qm\") pod \"barbican-api-5cb7c5c954-lttm2\" (UID: \"d67e2874-71ae-4106-80fd-5361439b8ea5\") " pod="openstack/barbican-api-5cb7c5c954-lttm2" Dec 01 06:58:15 crc kubenswrapper[4632]: I1201 06:58:15.000423 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d67e2874-71ae-4106-80fd-5361439b8ea5-combined-ca-bundle\") pod \"barbican-api-5cb7c5c954-lttm2\" (UID: \"d67e2874-71ae-4106-80fd-5361439b8ea5\") " pod="openstack/barbican-api-5cb7c5c954-lttm2" Dec 01 06:58:15 crc kubenswrapper[4632]: I1201 06:58:15.102095 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d67e2874-71ae-4106-80fd-5361439b8ea5-config-data\") pod \"barbican-api-5cb7c5c954-lttm2\" (UID: \"d67e2874-71ae-4106-80fd-5361439b8ea5\") " pod="openstack/barbican-api-5cb7c5c954-lttm2" Dec 01 06:58:15 crc kubenswrapper[4632]: I1201 06:58:15.102152 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-424qm\" (UniqueName: \"kubernetes.io/projected/d67e2874-71ae-4106-80fd-5361439b8ea5-kube-api-access-424qm\") pod \"barbican-api-5cb7c5c954-lttm2\" (UID: \"d67e2874-71ae-4106-80fd-5361439b8ea5\") " pod="openstack/barbican-api-5cb7c5c954-lttm2" Dec 01 06:58:15 crc kubenswrapper[4632]: I1201 06:58:15.102172 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d67e2874-71ae-4106-80fd-5361439b8ea5-combined-ca-bundle\") pod \"barbican-api-5cb7c5c954-lttm2\" (UID: \"d67e2874-71ae-4106-80fd-5361439b8ea5\") " pod="openstack/barbican-api-5cb7c5c954-lttm2" Dec 01 06:58:15 crc kubenswrapper[4632]: I1201 06:58:15.102283 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d67e2874-71ae-4106-80fd-5361439b8ea5-public-tls-certs\") pod \"barbican-api-5cb7c5c954-lttm2\" (UID: \"d67e2874-71ae-4106-80fd-5361439b8ea5\") " pod="openstack/barbican-api-5cb7c5c954-lttm2" Dec 01 06:58:15 crc kubenswrapper[4632]: I1201 06:58:15.102304 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d67e2874-71ae-4106-80fd-5361439b8ea5-logs\") pod \"barbican-api-5cb7c5c954-lttm2\" (UID: \"d67e2874-71ae-4106-80fd-5361439b8ea5\") " pod="openstack/barbican-api-5cb7c5c954-lttm2" Dec 01 06:58:15 crc kubenswrapper[4632]: I1201 06:58:15.102328 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d67e2874-71ae-4106-80fd-5361439b8ea5-internal-tls-certs\") pod \"barbican-api-5cb7c5c954-lttm2\" (UID: 
\"d67e2874-71ae-4106-80fd-5361439b8ea5\") " pod="openstack/barbican-api-5cb7c5c954-lttm2" Dec 01 06:58:15 crc kubenswrapper[4632]: I1201 06:58:15.102377 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d67e2874-71ae-4106-80fd-5361439b8ea5-config-data-custom\") pod \"barbican-api-5cb7c5c954-lttm2\" (UID: \"d67e2874-71ae-4106-80fd-5361439b8ea5\") " pod="openstack/barbican-api-5cb7c5c954-lttm2" Dec 01 06:58:15 crc kubenswrapper[4632]: I1201 06:58:15.103823 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d67e2874-71ae-4106-80fd-5361439b8ea5-logs\") pod \"barbican-api-5cb7c5c954-lttm2\" (UID: \"d67e2874-71ae-4106-80fd-5361439b8ea5\") " pod="openstack/barbican-api-5cb7c5c954-lttm2" Dec 01 06:58:15 crc kubenswrapper[4632]: I1201 06:58:15.107485 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d67e2874-71ae-4106-80fd-5361439b8ea5-public-tls-certs\") pod \"barbican-api-5cb7c5c954-lttm2\" (UID: \"d67e2874-71ae-4106-80fd-5361439b8ea5\") " pod="openstack/barbican-api-5cb7c5c954-lttm2" Dec 01 06:58:15 crc kubenswrapper[4632]: I1201 06:58:15.108127 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d67e2874-71ae-4106-80fd-5361439b8ea5-combined-ca-bundle\") pod \"barbican-api-5cb7c5c954-lttm2\" (UID: \"d67e2874-71ae-4106-80fd-5361439b8ea5\") " pod="openstack/barbican-api-5cb7c5c954-lttm2" Dec 01 06:58:15 crc kubenswrapper[4632]: I1201 06:58:15.108462 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d67e2874-71ae-4106-80fd-5361439b8ea5-internal-tls-certs\") pod \"barbican-api-5cb7c5c954-lttm2\" (UID: \"d67e2874-71ae-4106-80fd-5361439b8ea5\") " pod="openstack/barbican-api-5cb7c5c954-lttm2" Dec 01 06:58:15 crc kubenswrapper[4632]: I1201 06:58:15.108656 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d67e2874-71ae-4106-80fd-5361439b8ea5-config-data\") pod \"barbican-api-5cb7c5c954-lttm2\" (UID: \"d67e2874-71ae-4106-80fd-5361439b8ea5\") " pod="openstack/barbican-api-5cb7c5c954-lttm2" Dec 01 06:58:15 crc kubenswrapper[4632]: I1201 06:58:15.112746 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d67e2874-71ae-4106-80fd-5361439b8ea5-config-data-custom\") pod \"barbican-api-5cb7c5c954-lttm2\" (UID: \"d67e2874-71ae-4106-80fd-5361439b8ea5\") " pod="openstack/barbican-api-5cb7c5c954-lttm2" Dec 01 06:58:15 crc kubenswrapper[4632]: I1201 06:58:15.120153 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-424qm\" (UniqueName: \"kubernetes.io/projected/d67e2874-71ae-4106-80fd-5361439b8ea5-kube-api-access-424qm\") pod \"barbican-api-5cb7c5c954-lttm2\" (UID: \"d67e2874-71ae-4106-80fd-5361439b8ea5\") " pod="openstack/barbican-api-5cb7c5c954-lttm2" Dec 01 06:58:15 crc kubenswrapper[4632]: I1201 06:58:15.161735 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5cb7c5c954-lttm2" Dec 01 06:58:15 crc kubenswrapper[4632]: I1201 06:58:15.202068 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56d8484fc-ddw26" event={"ID":"9acc4130-8f32-48b6-8174-2062cefef802","Type":"ContainerStarted","Data":"055b28008e0b9548da1f2d6efdde8a10be6215cb1aee42a2d6539007ca8c8e33"} Dec 01 06:58:15 crc kubenswrapper[4632]: I1201 06:58:15.202156 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-56d8484fc-ddw26" Dec 01 06:58:15 crc kubenswrapper[4632]: I1201 06:58:15.206787 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-psmqh" event={"ID":"a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04","Type":"ContainerStarted","Data":"e9d825f3eb16e6bb08ae8bcd9894426ac3a3ab9eb78fba45b2ab218936b6c046"} Dec 01 06:58:15 crc kubenswrapper[4632]: I1201 06:58:15.222215 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-56d8484fc-ddw26" podStartSLOduration=3.222200103 podStartE2EDuration="3.222200103s" podCreationTimestamp="2025-12-01 06:58:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:58:15.219973624 +0000 UTC m=+904.784986617" watchObservedRunningTime="2025-12-01 06:58:15.222200103 +0000 UTC m=+904.787213077" Dec 01 06:58:15 crc kubenswrapper[4632]: I1201 06:58:15.248708 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-psmqh" podStartSLOduration=3.612542896 podStartE2EDuration="6.248688043s" podCreationTimestamp="2025-12-01 06:58:09 +0000 UTC" firstStartedPulling="2025-12-01 06:58:12.102579696 +0000 UTC m=+901.667592669" lastFinishedPulling="2025-12-01 06:58:14.738724844 +0000 UTC m=+904.303737816" observedRunningTime="2025-12-01 06:58:15.240759656 +0000 UTC m=+904.805772630" watchObservedRunningTime="2025-12-01 06:58:15.248688043 +0000 UTC m=+904.813701015" Dec 01 06:58:19 crc kubenswrapper[4632]: I1201 06:58:19.498559 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 06:58:19 crc kubenswrapper[4632]: I1201 06:58:19.499446 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 06:58:20 crc kubenswrapper[4632]: I1201 06:58:20.195958 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-psmqh" Dec 01 06:58:20 crc kubenswrapper[4632]: I1201 06:58:20.197496 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-psmqh" Dec 01 06:58:20 crc kubenswrapper[4632]: I1201 06:58:20.247784 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-psmqh" Dec 01 06:58:20 crc kubenswrapper[4632]: I1201 06:58:20.257080 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/barbican-keystone-listener-58b4798-zm5jf" event={"ID":"16fb6740-33c0-4a6d-8711-34f7520087a5","Type":"ContainerStarted","Data":"7ff2ea5416c7f59d6e21a09f32c49bdac2cce842830ca03dae0b8d900455943c"} Dec 01 06:58:20 crc kubenswrapper[4632]: I1201 06:58:20.261935 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08137779-8c6e-49ad-b911-7d4a952d5391","Type":"ContainerStarted","Data":"bcc55ea3da4be64a0c34e0120eb382aeb7ad27559e0ecd8c8deb87b3276edc7c"} Dec 01 06:58:20 crc kubenswrapper[4632]: I1201 06:58:20.262049 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="08137779-8c6e-49ad-b911-7d4a952d5391" containerName="ceilometer-central-agent" containerID="cri-o://f2029f8f9bd4697a543f7bbf87ff14732bba443d93f7d087838e7419cccb465e" gracePeriod=30 Dec 01 06:58:20 crc kubenswrapper[4632]: I1201 06:58:20.262085 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 06:58:20 crc kubenswrapper[4632]: I1201 06:58:20.262096 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="08137779-8c6e-49ad-b911-7d4a952d5391" containerName="proxy-httpd" containerID="cri-o://bcc55ea3da4be64a0c34e0120eb382aeb7ad27559e0ecd8c8deb87b3276edc7c" gracePeriod=30 Dec 01 06:58:20 crc kubenswrapper[4632]: I1201 06:58:20.262093 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="08137779-8c6e-49ad-b911-7d4a952d5391" containerName="sg-core" containerID="cri-o://aa13cd17e11e58b6ebbb5316f3539910bf44bece17270163c1259c06f120e030" gracePeriod=30 Dec 01 06:58:20 crc kubenswrapper[4632]: I1201 06:58:20.262163 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="08137779-8c6e-49ad-b911-7d4a952d5391" containerName="ceilometer-notification-agent" containerID="cri-o://b35e71f467bda0beae887614f9423ca39b1716c655e26ef6d5d8ea6c5ad9278e" gracePeriod=30 Dec 01 06:58:20 crc kubenswrapper[4632]: I1201 06:58:20.263817 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5cffc97f9c-677mp" event={"ID":"aec57a49-c244-4fad-81c2-b29649e62945","Type":"ContainerStarted","Data":"36a867bd71258ffc87bc379ccfcc46a074d7436374a5781d6de31f4a7da676d5"} Dec 01 06:58:20 crc kubenswrapper[4632]: I1201 06:58:20.294457 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5cb7c5c954-lttm2"] Dec 01 06:58:20 crc kubenswrapper[4632]: I1201 06:58:20.303822 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.831164655 podStartE2EDuration="40.30380457s" podCreationTimestamp="2025-12-01 06:57:40 +0000 UTC" firstStartedPulling="2025-12-01 06:57:41.46250783 +0000 UTC m=+871.027520803" lastFinishedPulling="2025-12-01 06:58:19.935147744 +0000 UTC m=+909.500160718" observedRunningTime="2025-12-01 06:58:20.293821157 +0000 UTC m=+909.858834130" watchObservedRunningTime="2025-12-01 06:58:20.30380457 +0000 UTC m=+909.868817544" Dec 01 06:58:20 crc kubenswrapper[4632]: W1201 06:58:20.310901 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd67e2874_71ae_4106_80fd_5361439b8ea5.slice/crio-6bd1291a70d77f3031dcb821fad7f47c256831b640757b873d252e4eb7436cc2 WatchSource:0}: Error finding container 
6bd1291a70d77f3031dcb821fad7f47c256831b640757b873d252e4eb7436cc2: Status 404 returned error can't find the container with id 6bd1291a70d77f3031dcb821fad7f47c256831b640757b873d252e4eb7436cc2 Dec 01 06:58:20 crc kubenswrapper[4632]: I1201 06:58:20.324528 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-psmqh" Dec 01 06:58:20 crc kubenswrapper[4632]: I1201 06:58:20.480618 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-psmqh"] Dec 01 06:58:21 crc kubenswrapper[4632]: I1201 06:58:21.274216 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5cb7c5c954-lttm2" event={"ID":"d67e2874-71ae-4106-80fd-5361439b8ea5","Type":"ContainerStarted","Data":"a7bb95eae31eb45d28419cc96553d63a2cb11923248152e127f5842dea1eb3c0"} Dec 01 06:58:21 crc kubenswrapper[4632]: I1201 06:58:21.274804 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5cb7c5c954-lttm2" event={"ID":"d67e2874-71ae-4106-80fd-5361439b8ea5","Type":"ContainerStarted","Data":"ab908664375bbe1ab35b7fff9bf3149883903ad83a17d25b0788bf3ea8f104cf"} Dec 01 06:58:21 crc kubenswrapper[4632]: I1201 06:58:21.274820 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5cb7c5c954-lttm2" event={"ID":"d67e2874-71ae-4106-80fd-5361439b8ea5","Type":"ContainerStarted","Data":"6bd1291a70d77f3031dcb821fad7f47c256831b640757b873d252e4eb7436cc2"} Dec 01 06:58:21 crc kubenswrapper[4632]: I1201 06:58:21.274841 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5cb7c5c954-lttm2" Dec 01 06:58:21 crc kubenswrapper[4632]: I1201 06:58:21.274853 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5cb7c5c954-lttm2" Dec 01 06:58:21 crc kubenswrapper[4632]: I1201 06:58:21.277711 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-58b4798-zm5jf" event={"ID":"16fb6740-33c0-4a6d-8711-34f7520087a5","Type":"ContainerStarted","Data":"4dd7d834e9d86e8e26b149357e545aee1e1bf1f4b7821973f88c26fe1f8942e4"} Dec 01 06:58:21 crc kubenswrapper[4632]: I1201 06:58:21.286766 4632 generic.go:334] "Generic (PLEG): container finished" podID="08137779-8c6e-49ad-b911-7d4a952d5391" containerID="bcc55ea3da4be64a0c34e0120eb382aeb7ad27559e0ecd8c8deb87b3276edc7c" exitCode=0 Dec 01 06:58:21 crc kubenswrapper[4632]: I1201 06:58:21.287591 4632 generic.go:334] "Generic (PLEG): container finished" podID="08137779-8c6e-49ad-b911-7d4a952d5391" containerID="aa13cd17e11e58b6ebbb5316f3539910bf44bece17270163c1259c06f120e030" exitCode=2 Dec 01 06:58:21 crc kubenswrapper[4632]: I1201 06:58:21.287753 4632 generic.go:334] "Generic (PLEG): container finished" podID="08137779-8c6e-49ad-b911-7d4a952d5391" containerID="f2029f8f9bd4697a543f7bbf87ff14732bba443d93f7d087838e7419cccb465e" exitCode=0 Dec 01 06:58:21 crc kubenswrapper[4632]: I1201 06:58:21.286840 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08137779-8c6e-49ad-b911-7d4a952d5391","Type":"ContainerDied","Data":"bcc55ea3da4be64a0c34e0120eb382aeb7ad27559e0ecd8c8deb87b3276edc7c"} Dec 01 06:58:21 crc kubenswrapper[4632]: I1201 06:58:21.288169 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08137779-8c6e-49ad-b911-7d4a952d5391","Type":"ContainerDied","Data":"aa13cd17e11e58b6ebbb5316f3539910bf44bece17270163c1259c06f120e030"} Dec 01 
06:58:21 crc kubenswrapper[4632]: I1201 06:58:21.288243 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08137779-8c6e-49ad-b911-7d4a952d5391","Type":"ContainerDied","Data":"f2029f8f9bd4697a543f7bbf87ff14732bba443d93f7d087838e7419cccb465e"} Dec 01 06:58:21 crc kubenswrapper[4632]: I1201 06:58:21.291170 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5cffc97f9c-677mp" event={"ID":"aec57a49-c244-4fad-81c2-b29649e62945","Type":"ContainerStarted","Data":"8d7794bec8277240b6e9f30ebe03312dbb9235c9873a7c25a642e7dbbac27cb1"} Dec 01 06:58:21 crc kubenswrapper[4632]: I1201 06:58:21.298658 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5cb7c5c954-lttm2" podStartSLOduration=7.298643643 podStartE2EDuration="7.298643643s" podCreationTimestamp="2025-12-01 06:58:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:58:21.296666003 +0000 UTC m=+910.861678976" watchObservedRunningTime="2025-12-01 06:58:21.298643643 +0000 UTC m=+910.863656616" Dec 01 06:58:21 crc kubenswrapper[4632]: I1201 06:58:21.323665 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-58b4798-zm5jf" podStartSLOduration=2.486691415 podStartE2EDuration="9.323648123s" podCreationTimestamp="2025-12-01 06:58:12 +0000 UTC" firstStartedPulling="2025-12-01 06:58:13.070908558 +0000 UTC m=+902.635921521" lastFinishedPulling="2025-12-01 06:58:19.907865256 +0000 UTC m=+909.472878229" observedRunningTime="2025-12-01 06:58:21.316303489 +0000 UTC m=+910.881316461" watchObservedRunningTime="2025-12-01 06:58:21.323648123 +0000 UTC m=+910.888661097" Dec 01 06:58:21 crc kubenswrapper[4632]: I1201 06:58:21.338997 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-5cffc97f9c-677mp" podStartSLOduration=2.313193002 podStartE2EDuration="9.338977332s" podCreationTimestamp="2025-12-01 06:58:12 +0000 UTC" firstStartedPulling="2025-12-01 06:58:12.864338175 +0000 UTC m=+902.429351148" lastFinishedPulling="2025-12-01 06:58:19.890122505 +0000 UTC m=+909.455135478" observedRunningTime="2025-12-01 06:58:21.331397593 +0000 UTC m=+910.896410566" watchObservedRunningTime="2025-12-01 06:58:21.338977332 +0000 UTC m=+910.903990305" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.091978 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.175224 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08137779-8c6e-49ad-b911-7d4a952d5391-scripts\") pod \"08137779-8c6e-49ad-b911-7d4a952d5391\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.175341 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08137779-8c6e-49ad-b911-7d4a952d5391-run-httpd\") pod \"08137779-8c6e-49ad-b911-7d4a952d5391\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.175425 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08137779-8c6e-49ad-b911-7d4a952d5391-config-data\") pod \"08137779-8c6e-49ad-b911-7d4a952d5391\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.175508 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08137779-8c6e-49ad-b911-7d4a952d5391-combined-ca-bundle\") pod \"08137779-8c6e-49ad-b911-7d4a952d5391\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.175611 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08137779-8c6e-49ad-b911-7d4a952d5391-log-httpd\") pod \"08137779-8c6e-49ad-b911-7d4a952d5391\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.175658 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08137779-8c6e-49ad-b911-7d4a952d5391-sg-core-conf-yaml\") pod \"08137779-8c6e-49ad-b911-7d4a952d5391\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.175683 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cz5vl\" (UniqueName: \"kubernetes.io/projected/08137779-8c6e-49ad-b911-7d4a952d5391-kube-api-access-cz5vl\") pod \"08137779-8c6e-49ad-b911-7d4a952d5391\" (UID: \"08137779-8c6e-49ad-b911-7d4a952d5391\") " Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.176043 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08137779-8c6e-49ad-b911-7d4a952d5391-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "08137779-8c6e-49ad-b911-7d4a952d5391" (UID: "08137779-8c6e-49ad-b911-7d4a952d5391"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.176156 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08137779-8c6e-49ad-b911-7d4a952d5391-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "08137779-8c6e-49ad-b911-7d4a952d5391" (UID: "08137779-8c6e-49ad-b911-7d4a952d5391"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.176645 4632 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08137779-8c6e-49ad-b911-7d4a952d5391-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.176665 4632 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/08137779-8c6e-49ad-b911-7d4a952d5391-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.186281 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08137779-8c6e-49ad-b911-7d4a952d5391-kube-api-access-cz5vl" (OuterVolumeSpecName: "kube-api-access-cz5vl") pod "08137779-8c6e-49ad-b911-7d4a952d5391" (UID: "08137779-8c6e-49ad-b911-7d4a952d5391"). InnerVolumeSpecName "kube-api-access-cz5vl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.190505 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08137779-8c6e-49ad-b911-7d4a952d5391-scripts" (OuterVolumeSpecName: "scripts") pod "08137779-8c6e-49ad-b911-7d4a952d5391" (UID: "08137779-8c6e-49ad-b911-7d4a952d5391"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.201667 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08137779-8c6e-49ad-b911-7d4a952d5391-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "08137779-8c6e-49ad-b911-7d4a952d5391" (UID: "08137779-8c6e-49ad-b911-7d4a952d5391"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.251643 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08137779-8c6e-49ad-b911-7d4a952d5391-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "08137779-8c6e-49ad-b911-7d4a952d5391" (UID: "08137779-8c6e-49ad-b911-7d4a952d5391"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.255253 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08137779-8c6e-49ad-b911-7d4a952d5391-config-data" (OuterVolumeSpecName: "config-data") pod "08137779-8c6e-49ad-b911-7d4a952d5391" (UID: "08137779-8c6e-49ad-b911-7d4a952d5391"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.278017 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08137779-8c6e-49ad-b911-7d4a952d5391-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.278053 4632 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/08137779-8c6e-49ad-b911-7d4a952d5391-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.278064 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cz5vl\" (UniqueName: \"kubernetes.io/projected/08137779-8c6e-49ad-b911-7d4a952d5391-kube-api-access-cz5vl\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.278076 4632 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08137779-8c6e-49ad-b911-7d4a952d5391-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.278086 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08137779-8c6e-49ad-b911-7d4a952d5391-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.309618 4632 generic.go:334] "Generic (PLEG): container finished" podID="08137779-8c6e-49ad-b911-7d4a952d5391" containerID="b35e71f467bda0beae887614f9423ca39b1716c655e26ef6d5d8ea6c5ad9278e" exitCode=0 Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.309821 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08137779-8c6e-49ad-b911-7d4a952d5391","Type":"ContainerDied","Data":"b35e71f467bda0beae887614f9423ca39b1716c655e26ef6d5d8ea6c5ad9278e"} Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.309873 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"08137779-8c6e-49ad-b911-7d4a952d5391","Type":"ContainerDied","Data":"da3f70df11c7bfe5be7bfc26b019c4c6e68bd5a27a11bbd5384200bdd1da1849"} Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.309837 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.309901 4632 scope.go:117] "RemoveContainer" containerID="bcc55ea3da4be64a0c34e0120eb382aeb7ad27559e0ecd8c8deb87b3276edc7c" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.310757 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-psmqh" podUID="a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04" containerName="registry-server" containerID="cri-o://e9d825f3eb16e6bb08ae8bcd9894426ac3a3ab9eb78fba45b2ab218936b6c046" gracePeriod=2 Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.342593 4632 scope.go:117] "RemoveContainer" containerID="aa13cd17e11e58b6ebbb5316f3539910bf44bece17270163c1259c06f120e030" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.342703 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.363011 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.363441 4632 scope.go:117] "RemoveContainer" containerID="b35e71f467bda0beae887614f9423ca39b1716c655e26ef6d5d8ea6c5ad9278e" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.377206 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 06:58:22 crc kubenswrapper[4632]: E1201 06:58:22.377753 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08137779-8c6e-49ad-b911-7d4a952d5391" containerName="proxy-httpd" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.377844 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="08137779-8c6e-49ad-b911-7d4a952d5391" containerName="proxy-httpd" Dec 01 06:58:22 crc kubenswrapper[4632]: E1201 06:58:22.377902 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08137779-8c6e-49ad-b911-7d4a952d5391" containerName="sg-core" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.377947 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="08137779-8c6e-49ad-b911-7d4a952d5391" containerName="sg-core" Dec 01 06:58:22 crc kubenswrapper[4632]: E1201 06:58:22.378010 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08137779-8c6e-49ad-b911-7d4a952d5391" containerName="ceilometer-central-agent" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.378057 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="08137779-8c6e-49ad-b911-7d4a952d5391" containerName="ceilometer-central-agent" Dec 01 06:58:22 crc kubenswrapper[4632]: E1201 06:58:22.378112 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08137779-8c6e-49ad-b911-7d4a952d5391" containerName="ceilometer-notification-agent" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.378174 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="08137779-8c6e-49ad-b911-7d4a952d5391" containerName="ceilometer-notification-agent" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.378461 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="08137779-8c6e-49ad-b911-7d4a952d5391" containerName="sg-core" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.378526 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="08137779-8c6e-49ad-b911-7d4a952d5391" containerName="ceilometer-notification-agent" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.378583 4632 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="08137779-8c6e-49ad-b911-7d4a952d5391" containerName="proxy-httpd" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.378655 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="08137779-8c6e-49ad-b911-7d4a952d5391" containerName="ceilometer-central-agent" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.380956 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.385652 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.385927 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.386244 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.428176 4632 scope.go:117] "RemoveContainer" containerID="f2029f8f9bd4697a543f7bbf87ff14732bba443d93f7d087838e7419cccb465e" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.449265 4632 scope.go:117] "RemoveContainer" containerID="bcc55ea3da4be64a0c34e0120eb382aeb7ad27559e0ecd8c8deb87b3276edc7c" Dec 01 06:58:22 crc kubenswrapper[4632]: E1201 06:58:22.449561 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bcc55ea3da4be64a0c34e0120eb382aeb7ad27559e0ecd8c8deb87b3276edc7c\": container with ID starting with bcc55ea3da4be64a0c34e0120eb382aeb7ad27559e0ecd8c8deb87b3276edc7c not found: ID does not exist" containerID="bcc55ea3da4be64a0c34e0120eb382aeb7ad27559e0ecd8c8deb87b3276edc7c" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.449592 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bcc55ea3da4be64a0c34e0120eb382aeb7ad27559e0ecd8c8deb87b3276edc7c"} err="failed to get container status \"bcc55ea3da4be64a0c34e0120eb382aeb7ad27559e0ecd8c8deb87b3276edc7c\": rpc error: code = NotFound desc = could not find container \"bcc55ea3da4be64a0c34e0120eb382aeb7ad27559e0ecd8c8deb87b3276edc7c\": container with ID starting with bcc55ea3da4be64a0c34e0120eb382aeb7ad27559e0ecd8c8deb87b3276edc7c not found: ID does not exist" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.449613 4632 scope.go:117] "RemoveContainer" containerID="aa13cd17e11e58b6ebbb5316f3539910bf44bece17270163c1259c06f120e030" Dec 01 06:58:22 crc kubenswrapper[4632]: E1201 06:58:22.450685 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa13cd17e11e58b6ebbb5316f3539910bf44bece17270163c1259c06f120e030\": container with ID starting with aa13cd17e11e58b6ebbb5316f3539910bf44bece17270163c1259c06f120e030 not found: ID does not exist" containerID="aa13cd17e11e58b6ebbb5316f3539910bf44bece17270163c1259c06f120e030" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.450737 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa13cd17e11e58b6ebbb5316f3539910bf44bece17270163c1259c06f120e030"} err="failed to get container status \"aa13cd17e11e58b6ebbb5316f3539910bf44bece17270163c1259c06f120e030\": rpc error: code = NotFound desc = could not find container \"aa13cd17e11e58b6ebbb5316f3539910bf44bece17270163c1259c06f120e030\": container with ID starting with aa13cd17e11e58b6ebbb5316f3539910bf44bece17270163c1259c06f120e030 
not found: ID does not exist" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.450767 4632 scope.go:117] "RemoveContainer" containerID="b35e71f467bda0beae887614f9423ca39b1716c655e26ef6d5d8ea6c5ad9278e" Dec 01 06:58:22 crc kubenswrapper[4632]: E1201 06:58:22.451216 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b35e71f467bda0beae887614f9423ca39b1716c655e26ef6d5d8ea6c5ad9278e\": container with ID starting with b35e71f467bda0beae887614f9423ca39b1716c655e26ef6d5d8ea6c5ad9278e not found: ID does not exist" containerID="b35e71f467bda0beae887614f9423ca39b1716c655e26ef6d5d8ea6c5ad9278e" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.451239 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b35e71f467bda0beae887614f9423ca39b1716c655e26ef6d5d8ea6c5ad9278e"} err="failed to get container status \"b35e71f467bda0beae887614f9423ca39b1716c655e26ef6d5d8ea6c5ad9278e\": rpc error: code = NotFound desc = could not find container \"b35e71f467bda0beae887614f9423ca39b1716c655e26ef6d5d8ea6c5ad9278e\": container with ID starting with b35e71f467bda0beae887614f9423ca39b1716c655e26ef6d5d8ea6c5ad9278e not found: ID does not exist" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.451256 4632 scope.go:117] "RemoveContainer" containerID="f2029f8f9bd4697a543f7bbf87ff14732bba443d93f7d087838e7419cccb465e" Dec 01 06:58:22 crc kubenswrapper[4632]: E1201 06:58:22.451522 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2029f8f9bd4697a543f7bbf87ff14732bba443d93f7d087838e7419cccb465e\": container with ID starting with f2029f8f9bd4697a543f7bbf87ff14732bba443d93f7d087838e7419cccb465e not found: ID does not exist" containerID="f2029f8f9bd4697a543f7bbf87ff14732bba443d93f7d087838e7419cccb465e" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.451539 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2029f8f9bd4697a543f7bbf87ff14732bba443d93f7d087838e7419cccb465e"} err="failed to get container status \"f2029f8f9bd4697a543f7bbf87ff14732bba443d93f7d087838e7419cccb465e\": rpc error: code = NotFound desc = could not find container \"f2029f8f9bd4697a543f7bbf87ff14732bba443d93f7d087838e7419cccb465e\": container with ID starting with f2029f8f9bd4697a543f7bbf87ff14732bba443d93f7d087838e7419cccb465e not found: ID does not exist" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.485461 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc95ee19-cd3d-4f92-accc-637656c66069-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " pod="openstack/ceilometer-0" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.485532 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc95ee19-cd3d-4f92-accc-637656c66069-config-data\") pod \"ceilometer-0\" (UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " pod="openstack/ceilometer-0" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.485614 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bc95ee19-cd3d-4f92-accc-637656c66069-sg-core-conf-yaml\") pod \"ceilometer-0\" 
(UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " pod="openstack/ceilometer-0" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.485668 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bc95ee19-cd3d-4f92-accc-637656c66069-scripts\") pod \"ceilometer-0\" (UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " pod="openstack/ceilometer-0" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.485704 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwgzr\" (UniqueName: \"kubernetes.io/projected/bc95ee19-cd3d-4f92-accc-637656c66069-kube-api-access-cwgzr\") pod \"ceilometer-0\" (UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " pod="openstack/ceilometer-0" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.485963 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bc95ee19-cd3d-4f92-accc-637656c66069-run-httpd\") pod \"ceilometer-0\" (UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " pod="openstack/ceilometer-0" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.486043 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bc95ee19-cd3d-4f92-accc-637656c66069-log-httpd\") pod \"ceilometer-0\" (UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " pod="openstack/ceilometer-0" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.515541 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-56d8484fc-ddw26" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.561165 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb95744f5-z46xh"] Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.561438 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" podUID="77dc536d-ddc2-4490-909c-2f59f184d3ca" containerName="dnsmasq-dns" containerID="cri-o://bd19fbb95d062211fecefc23d5f2a5f16bf90c49450e40d99b139aa5978b5409" gracePeriod=10 Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.587807 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bc95ee19-cd3d-4f92-accc-637656c66069-scripts\") pod \"ceilometer-0\" (UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " pod="openstack/ceilometer-0" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.587849 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwgzr\" (UniqueName: \"kubernetes.io/projected/bc95ee19-cd3d-4f92-accc-637656c66069-kube-api-access-cwgzr\") pod \"ceilometer-0\" (UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " pod="openstack/ceilometer-0" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.587931 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bc95ee19-cd3d-4f92-accc-637656c66069-run-httpd\") pod \"ceilometer-0\" (UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " pod="openstack/ceilometer-0" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.587966 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/bc95ee19-cd3d-4f92-accc-637656c66069-log-httpd\") pod \"ceilometer-0\" (UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " pod="openstack/ceilometer-0" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.588042 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc95ee19-cd3d-4f92-accc-637656c66069-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " pod="openstack/ceilometer-0" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.588068 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc95ee19-cd3d-4f92-accc-637656c66069-config-data\") pod \"ceilometer-0\" (UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " pod="openstack/ceilometer-0" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.588092 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bc95ee19-cd3d-4f92-accc-637656c66069-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " pod="openstack/ceilometer-0" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.590523 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bc95ee19-cd3d-4f92-accc-637656c66069-log-httpd\") pod \"ceilometer-0\" (UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " pod="openstack/ceilometer-0" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.590980 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bc95ee19-cd3d-4f92-accc-637656c66069-run-httpd\") pod \"ceilometer-0\" (UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " pod="openstack/ceilometer-0" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.594512 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bc95ee19-cd3d-4f92-accc-637656c66069-scripts\") pod \"ceilometer-0\" (UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " pod="openstack/ceilometer-0" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.596463 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bc95ee19-cd3d-4f92-accc-637656c66069-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " pod="openstack/ceilometer-0" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.596910 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc95ee19-cd3d-4f92-accc-637656c66069-config-data\") pod \"ceilometer-0\" (UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " pod="openstack/ceilometer-0" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.604824 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc95ee19-cd3d-4f92-accc-637656c66069-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " pod="openstack/ceilometer-0" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.611799 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwgzr\" (UniqueName: 
\"kubernetes.io/projected/bc95ee19-cd3d-4f92-accc-637656c66069-kube-api-access-cwgzr\") pod \"ceilometer-0\" (UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " pod="openstack/ceilometer-0" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.701827 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-psmqh" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.743243 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.762397 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08137779-8c6e-49ad-b911-7d4a952d5391" path="/var/lib/kubelet/pods/08137779-8c6e-49ad-b911-7d4a952d5391/volumes" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.792933 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04-utilities\") pod \"a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04\" (UID: \"a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04\") " Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.793015 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65n6x\" (UniqueName: \"kubernetes.io/projected/a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04-kube-api-access-65n6x\") pod \"a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04\" (UID: \"a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04\") " Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.793115 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04-catalog-content\") pod \"a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04\" (UID: \"a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04\") " Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.793749 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04-utilities" (OuterVolumeSpecName: "utilities") pod "a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04" (UID: "a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.798068 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04-kube-api-access-65n6x" (OuterVolumeSpecName: "kube-api-access-65n6x") pod "a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04" (UID: "a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04"). InnerVolumeSpecName "kube-api-access-65n6x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.799938 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.799961 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65n6x\" (UniqueName: \"kubernetes.io/projected/a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04-kube-api-access-65n6x\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.836088 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04" (UID: "a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.902368 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:22 crc kubenswrapper[4632]: I1201 06:58:22.989996 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.004103 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-dns-svc\") pod \"77dc536d-ddc2-4490-909c-2f59f184d3ca\" (UID: \"77dc536d-ddc2-4490-909c-2f59f184d3ca\") " Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.004186 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-dns-swift-storage-0\") pod \"77dc536d-ddc2-4490-909c-2f59f184d3ca\" (UID: \"77dc536d-ddc2-4490-909c-2f59f184d3ca\") " Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.004382 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cchmz\" (UniqueName: \"kubernetes.io/projected/77dc536d-ddc2-4490-909c-2f59f184d3ca-kube-api-access-cchmz\") pod \"77dc536d-ddc2-4490-909c-2f59f184d3ca\" (UID: \"77dc536d-ddc2-4490-909c-2f59f184d3ca\") " Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.004447 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-config\") pod \"77dc536d-ddc2-4490-909c-2f59f184d3ca\" (UID: \"77dc536d-ddc2-4490-909c-2f59f184d3ca\") " Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.004604 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-ovsdbserver-sb\") pod \"77dc536d-ddc2-4490-909c-2f59f184d3ca\" (UID: \"77dc536d-ddc2-4490-909c-2f59f184d3ca\") " Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.004717 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-ovsdbserver-nb\") pod 
\"77dc536d-ddc2-4490-909c-2f59f184d3ca\" (UID: \"77dc536d-ddc2-4490-909c-2f59f184d3ca\") " Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.040372 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77dc536d-ddc2-4490-909c-2f59f184d3ca-kube-api-access-cchmz" (OuterVolumeSpecName: "kube-api-access-cchmz") pod "77dc536d-ddc2-4490-909c-2f59f184d3ca" (UID: "77dc536d-ddc2-4490-909c-2f59f184d3ca"). InnerVolumeSpecName "kube-api-access-cchmz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.062557 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "77dc536d-ddc2-4490-909c-2f59f184d3ca" (UID: "77dc536d-ddc2-4490-909c-2f59f184d3ca"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.087913 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "77dc536d-ddc2-4490-909c-2f59f184d3ca" (UID: "77dc536d-ddc2-4490-909c-2f59f184d3ca"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.107418 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "77dc536d-ddc2-4490-909c-2f59f184d3ca" (UID: "77dc536d-ddc2-4490-909c-2f59f184d3ca"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.108297 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-ovsdbserver-sb\") pod \"77dc536d-ddc2-4490-909c-2f59f184d3ca\" (UID: \"77dc536d-ddc2-4490-909c-2f59f184d3ca\") " Dec 01 06:58:23 crc kubenswrapper[4632]: W1201 06:58:23.108401 4632 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/77dc536d-ddc2-4490-909c-2f59f184d3ca/volumes/kubernetes.io~configmap/ovsdbserver-sb Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.108431 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "77dc536d-ddc2-4490-909c-2f59f184d3ca" (UID: "77dc536d-ddc2-4490-909c-2f59f184d3ca"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.108925 4632 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.108947 4632 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.108959 4632 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.108973 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cchmz\" (UniqueName: \"kubernetes.io/projected/77dc536d-ddc2-4490-909c-2f59f184d3ca-kube-api-access-cchmz\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.117228 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "77dc536d-ddc2-4490-909c-2f59f184d3ca" (UID: "77dc536d-ddc2-4490-909c-2f59f184d3ca"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.127462 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-config" (OuterVolumeSpecName: "config") pod "77dc536d-ddc2-4490-909c-2f59f184d3ca" (UID: "77dc536d-ddc2-4490-909c-2f59f184d3ca"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.195985 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.211027 4632 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.211057 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77dc536d-ddc2-4490-909c-2f59f184d3ca-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.321594 4632 generic.go:334] "Generic (PLEG): container finished" podID="77dc536d-ddc2-4490-909c-2f59f184d3ca" containerID="bd19fbb95d062211fecefc23d5f2a5f16bf90c49450e40d99b139aa5978b5409" exitCode=0 Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.321669 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" event={"ID":"77dc536d-ddc2-4490-909c-2f59f184d3ca","Type":"ContainerDied","Data":"bd19fbb95d062211fecefc23d5f2a5f16bf90c49450e40d99b139aa5978b5409"} Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.321702 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" event={"ID":"77dc536d-ddc2-4490-909c-2f59f184d3ca","Type":"ContainerDied","Data":"c5f67f04ed454c17782997e274ad6afb6c13de5f204f6fdf4269bf6bb91b66cf"} Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.321723 4632 scope.go:117] "RemoveContainer" containerID="bd19fbb95d062211fecefc23d5f2a5f16bf90c49450e40d99b139aa5978b5409" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.321911 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb95744f5-z46xh" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.331510 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bc95ee19-cd3d-4f92-accc-637656c66069","Type":"ContainerStarted","Data":"8a68fc7d5a15ae71af0634dd3dc5af4fe0fb689d590eab5588bdc0068d18bbc5"} Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.334848 4632 generic.go:334] "Generic (PLEG): container finished" podID="a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04" containerID="e9d825f3eb16e6bb08ae8bcd9894426ac3a3ab9eb78fba45b2ab218936b6c046" exitCode=0 Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.334951 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-psmqh" event={"ID":"a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04","Type":"ContainerDied","Data":"e9d825f3eb16e6bb08ae8bcd9894426ac3a3ab9eb78fba45b2ab218936b6c046"} Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.335000 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-psmqh" event={"ID":"a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04","Type":"ContainerDied","Data":"fcb7596bee82aa4242c8c3435e551e99504e5de2160aa8977d5d46e86d422edf"} Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.334957 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-psmqh" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.365572 4632 scope.go:117] "RemoveContainer" containerID="63a5bf2cd88e2b9182fdbe37ac57b50854c7d81d6091078a407e5fccbf573ea9" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.379193 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb95744f5-z46xh"] Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.389184 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bb95744f5-z46xh"] Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.398636 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-psmqh"] Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.401687 4632 scope.go:117] "RemoveContainer" containerID="bd19fbb95d062211fecefc23d5f2a5f16bf90c49450e40d99b139aa5978b5409" Dec 01 06:58:23 crc kubenswrapper[4632]: E1201 06:58:23.402464 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd19fbb95d062211fecefc23d5f2a5f16bf90c49450e40d99b139aa5978b5409\": container with ID starting with bd19fbb95d062211fecefc23d5f2a5f16bf90c49450e40d99b139aa5978b5409 not found: ID does not exist" containerID="bd19fbb95d062211fecefc23d5f2a5f16bf90c49450e40d99b139aa5978b5409" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.402519 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd19fbb95d062211fecefc23d5f2a5f16bf90c49450e40d99b139aa5978b5409"} err="failed to get container status \"bd19fbb95d062211fecefc23d5f2a5f16bf90c49450e40d99b139aa5978b5409\": rpc error: code = NotFound desc = could not find container \"bd19fbb95d062211fecefc23d5f2a5f16bf90c49450e40d99b139aa5978b5409\": container with ID starting with bd19fbb95d062211fecefc23d5f2a5f16bf90c49450e40d99b139aa5978b5409 not found: ID does not exist" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.402557 4632 scope.go:117] "RemoveContainer" containerID="63a5bf2cd88e2b9182fdbe37ac57b50854c7d81d6091078a407e5fccbf573ea9" Dec 01 06:58:23 crc kubenswrapper[4632]: E1201 06:58:23.403192 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"63a5bf2cd88e2b9182fdbe37ac57b50854c7d81d6091078a407e5fccbf573ea9\": container with ID starting with 63a5bf2cd88e2b9182fdbe37ac57b50854c7d81d6091078a407e5fccbf573ea9 not found: ID does not exist" containerID="63a5bf2cd88e2b9182fdbe37ac57b50854c7d81d6091078a407e5fccbf573ea9" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.403236 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63a5bf2cd88e2b9182fdbe37ac57b50854c7d81d6091078a407e5fccbf573ea9"} err="failed to get container status \"63a5bf2cd88e2b9182fdbe37ac57b50854c7d81d6091078a407e5fccbf573ea9\": rpc error: code = NotFound desc = could not find container \"63a5bf2cd88e2b9182fdbe37ac57b50854c7d81d6091078a407e5fccbf573ea9\": container with ID starting with 63a5bf2cd88e2b9182fdbe37ac57b50854c7d81d6091078a407e5fccbf573ea9 not found: ID does not exist" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.403296 4632 scope.go:117] "RemoveContainer" containerID="e9d825f3eb16e6bb08ae8bcd9894426ac3a3ab9eb78fba45b2ab218936b6c046" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.407155 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-marketplace/community-operators-psmqh"] Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.435973 4632 scope.go:117] "RemoveContainer" containerID="d58ac23cba20c81ef7e97baf26570de8a576448f3945d4e1b984c4e084d7baec" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.457412 4632 scope.go:117] "RemoveContainer" containerID="214dbe21b234aae3c272f38142d82c6799df255201ca0a04da25a7e903b52579" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.478812 4632 scope.go:117] "RemoveContainer" containerID="e9d825f3eb16e6bb08ae8bcd9894426ac3a3ab9eb78fba45b2ab218936b6c046" Dec 01 06:58:23 crc kubenswrapper[4632]: E1201 06:58:23.482190 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9d825f3eb16e6bb08ae8bcd9894426ac3a3ab9eb78fba45b2ab218936b6c046\": container with ID starting with e9d825f3eb16e6bb08ae8bcd9894426ac3a3ab9eb78fba45b2ab218936b6c046 not found: ID does not exist" containerID="e9d825f3eb16e6bb08ae8bcd9894426ac3a3ab9eb78fba45b2ab218936b6c046" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.482235 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9d825f3eb16e6bb08ae8bcd9894426ac3a3ab9eb78fba45b2ab218936b6c046"} err="failed to get container status \"e9d825f3eb16e6bb08ae8bcd9894426ac3a3ab9eb78fba45b2ab218936b6c046\": rpc error: code = NotFound desc = could not find container \"e9d825f3eb16e6bb08ae8bcd9894426ac3a3ab9eb78fba45b2ab218936b6c046\": container with ID starting with e9d825f3eb16e6bb08ae8bcd9894426ac3a3ab9eb78fba45b2ab218936b6c046 not found: ID does not exist" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.482265 4632 scope.go:117] "RemoveContainer" containerID="d58ac23cba20c81ef7e97baf26570de8a576448f3945d4e1b984c4e084d7baec" Dec 01 06:58:23 crc kubenswrapper[4632]: E1201 06:58:23.482639 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d58ac23cba20c81ef7e97baf26570de8a576448f3945d4e1b984c4e084d7baec\": container with ID starting with d58ac23cba20c81ef7e97baf26570de8a576448f3945d4e1b984c4e084d7baec not found: ID does not exist" containerID="d58ac23cba20c81ef7e97baf26570de8a576448f3945d4e1b984c4e084d7baec" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.482664 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d58ac23cba20c81ef7e97baf26570de8a576448f3945d4e1b984c4e084d7baec"} err="failed to get container status \"d58ac23cba20c81ef7e97baf26570de8a576448f3945d4e1b984c4e084d7baec\": rpc error: code = NotFound desc = could not find container \"d58ac23cba20c81ef7e97baf26570de8a576448f3945d4e1b984c4e084d7baec\": container with ID starting with d58ac23cba20c81ef7e97baf26570de8a576448f3945d4e1b984c4e084d7baec not found: ID does not exist" Dec 01 06:58:23 crc kubenswrapper[4632]: I1201 06:58:23.482682 4632 scope.go:117] "RemoveContainer" containerID="214dbe21b234aae3c272f38142d82c6799df255201ca0a04da25a7e903b52579" Dec 01 06:58:23 crc kubenswrapper[4632]: E1201 06:58:23.483040 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"214dbe21b234aae3c272f38142d82c6799df255201ca0a04da25a7e903b52579\": container with ID starting with 214dbe21b234aae3c272f38142d82c6799df255201ca0a04da25a7e903b52579 not found: ID does not exist" containerID="214dbe21b234aae3c272f38142d82c6799df255201ca0a04da25a7e903b52579" Dec 01 06:58:23 crc 
kubenswrapper[4632]: I1201 06:58:23.483086 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"214dbe21b234aae3c272f38142d82c6799df255201ca0a04da25a7e903b52579"} err="failed to get container status \"214dbe21b234aae3c272f38142d82c6799df255201ca0a04da25a7e903b52579\": rpc error: code = NotFound desc = could not find container \"214dbe21b234aae3c272f38142d82c6799df255201ca0a04da25a7e903b52579\": container with ID starting with 214dbe21b234aae3c272f38142d82c6799df255201ca0a04da25a7e903b52579 not found: ID does not exist" Dec 01 06:58:24 crc kubenswrapper[4632]: I1201 06:58:24.087987 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-94fdc9c94-ghwxr" Dec 01 06:58:24 crc kubenswrapper[4632]: I1201 06:58:24.168441 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-94fdc9c94-ghwxr" Dec 01 06:58:24 crc kubenswrapper[4632]: I1201 06:58:24.758495 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77dc536d-ddc2-4490-909c-2f59f184d3ca" path="/var/lib/kubelet/pods/77dc536d-ddc2-4490-909c-2f59f184d3ca/volumes" Dec 01 06:58:24 crc kubenswrapper[4632]: I1201 06:58:24.759544 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04" path="/var/lib/kubelet/pods/a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04/volumes" Dec 01 06:58:25 crc kubenswrapper[4632]: I1201 06:58:25.005821 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-787fbf84bd-fhwb4" Dec 01 06:58:25 crc kubenswrapper[4632]: I1201 06:58:25.374827 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bc95ee19-cd3d-4f92-accc-637656c66069","Type":"ContainerStarted","Data":"ec1190518c8b14a6ab20aeb38738331506887ada8ae059bd234e5473a7b03a1c"} Dec 01 06:58:26 crc kubenswrapper[4632]: I1201 06:58:26.398464 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bc95ee19-cd3d-4f92-accc-637656c66069","Type":"ContainerStarted","Data":"70cc31822087780048289b3347fd49b9c67f99a57d77e793113b7a65ccd2cb7a"} Dec 01 06:58:26 crc kubenswrapper[4632]: I1201 06:58:26.399974 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-7q66w" event={"ID":"6b176a21-27cf-4608-8787-e91a914be7cb","Type":"ContainerStarted","Data":"0634876edc50405a26c444ff1361dd7f980f6903348fee93dcecbd3687dbf080"} Dec 01 06:58:26 crc kubenswrapper[4632]: I1201 06:58:26.417431 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-7q66w" podStartSLOduration=2.7011840879999998 podStartE2EDuration="46.4174177s" podCreationTimestamp="2025-12-01 06:57:40 +0000 UTC" firstStartedPulling="2025-12-01 06:57:41.761490718 +0000 UTC m=+871.326503691" lastFinishedPulling="2025-12-01 06:58:25.47772433 +0000 UTC m=+915.042737303" observedRunningTime="2025-12-01 06:58:26.416948766 +0000 UTC m=+915.981961739" watchObservedRunningTime="2025-12-01 06:58:26.4174177 +0000 UTC m=+915.982430673" Dec 01 06:58:26 crc kubenswrapper[4632]: I1201 06:58:26.569804 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5cb7c5c954-lttm2" Dec 01 06:58:26 crc kubenswrapper[4632]: I1201 06:58:26.570445 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5cb7c5c954-lttm2" Dec 01 06:58:26 crc kubenswrapper[4632]: I1201 
06:58:26.637149 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-94fdc9c94-ghwxr"] Dec 01 06:58:26 crc kubenswrapper[4632]: I1201 06:58:26.640848 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-94fdc9c94-ghwxr" podUID="64b038c3-a697-4612-a2a1-1701578beb7f" containerName="barbican-api-log" containerID="cri-o://fa9cee183059808ff7b8044e7c1e24b5c1b526b9e506082744b5514e9273d715" gracePeriod=30 Dec 01 06:58:26 crc kubenswrapper[4632]: I1201 06:58:26.641073 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-94fdc9c94-ghwxr" podUID="64b038c3-a697-4612-a2a1-1701578beb7f" containerName="barbican-api" containerID="cri-o://f57a2136be7ad2ce51bf3d1ebd9cbb88ce55f3e262eccec03b5728407cbeb17e" gracePeriod=30 Dec 01 06:58:27 crc kubenswrapper[4632]: I1201 06:58:27.410492 4632 generic.go:334] "Generic (PLEG): container finished" podID="64b038c3-a697-4612-a2a1-1701578beb7f" containerID="fa9cee183059808ff7b8044e7c1e24b5c1b526b9e506082744b5514e9273d715" exitCode=143 Dec 01 06:58:27 crc kubenswrapper[4632]: I1201 06:58:27.410869 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-94fdc9c94-ghwxr" event={"ID":"64b038c3-a697-4612-a2a1-1701578beb7f","Type":"ContainerDied","Data":"fa9cee183059808ff7b8044e7c1e24b5c1b526b9e506082744b5514e9273d715"} Dec 01 06:58:27 crc kubenswrapper[4632]: I1201 06:58:27.414502 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bc95ee19-cd3d-4f92-accc-637656c66069","Type":"ContainerStarted","Data":"38cf06f25f88b7e076b9d55523a097805f2822bdc72082b0e53d13c68912eab5"} Dec 01 06:58:27 crc kubenswrapper[4632]: I1201 06:58:27.480748 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-594d88dfbf-66tbw" Dec 01 06:58:27 crc kubenswrapper[4632]: I1201 06:58:27.534978 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-787fbf84bd-fhwb4"] Dec 01 06:58:27 crc kubenswrapper[4632]: I1201 06:58:27.535217 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-787fbf84bd-fhwb4" podUID="979b2304-e919-4eba-8d31-392560a3960a" containerName="neutron-api" containerID="cri-o://d0cc1061f53bf12326249897324c29a70a53d4386a7ef4c02658a787fbd13f69" gracePeriod=30 Dec 01 06:58:27 crc kubenswrapper[4632]: I1201 06:58:27.535481 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-787fbf84bd-fhwb4" podUID="979b2304-e919-4eba-8d31-392560a3960a" containerName="neutron-httpd" containerID="cri-o://ae197b02fc7549c143e60ac010845904bad62578b7b8ecbf7fcdbbd81ff1207e" gracePeriod=30 Dec 01 06:58:28 crc kubenswrapper[4632]: I1201 06:58:28.427085 4632 generic.go:334] "Generic (PLEG): container finished" podID="979b2304-e919-4eba-8d31-392560a3960a" containerID="ae197b02fc7549c143e60ac010845904bad62578b7b8ecbf7fcdbbd81ff1207e" exitCode=0 Dec 01 06:58:28 crc kubenswrapper[4632]: I1201 06:58:28.427164 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-787fbf84bd-fhwb4" event={"ID":"979b2304-e919-4eba-8d31-392560a3960a","Type":"ContainerDied","Data":"ae197b02fc7549c143e60ac010845904bad62578b7b8ecbf7fcdbbd81ff1207e"} Dec 01 06:58:29 crc kubenswrapper[4632]: I1201 06:58:29.350298 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-8445898876-gptmm" Dec 01 06:58:29 crc kubenswrapper[4632]: I1201 
06:58:29.430405 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-8445898876-gptmm" Dec 01 06:58:29 crc kubenswrapper[4632]: I1201 06:58:29.439101 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bc95ee19-cd3d-4f92-accc-637656c66069","Type":"ContainerStarted","Data":"32f96c15774221a41c2df09c91676d6d46c543a5f33919dc5b1b406fb17955d1"} Dec 01 06:58:29 crc kubenswrapper[4632]: I1201 06:58:29.439221 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 06:58:29 crc kubenswrapper[4632]: I1201 06:58:29.440828 4632 generic.go:334] "Generic (PLEG): container finished" podID="6b176a21-27cf-4608-8787-e91a914be7cb" containerID="0634876edc50405a26c444ff1361dd7f980f6903348fee93dcecbd3687dbf080" exitCode=0 Dec 01 06:58:29 crc kubenswrapper[4632]: I1201 06:58:29.440909 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-7q66w" event={"ID":"6b176a21-27cf-4608-8787-e91a914be7cb","Type":"ContainerDied","Data":"0634876edc50405a26c444ff1361dd7f980f6903348fee93dcecbd3687dbf080"} Dec 01 06:58:29 crc kubenswrapper[4632]: I1201 06:58:29.500014 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.275020685 podStartE2EDuration="7.499996089s" podCreationTimestamp="2025-12-01 06:58:22 +0000 UTC" firstStartedPulling="2025-12-01 06:58:23.19532465 +0000 UTC m=+912.760337623" lastFinishedPulling="2025-12-01 06:58:28.420300055 +0000 UTC m=+917.985313027" observedRunningTime="2025-12-01 06:58:29.49204612 +0000 UTC m=+919.057059094" watchObservedRunningTime="2025-12-01 06:58:29.499996089 +0000 UTC m=+919.065009061" Dec 01 06:58:29 crc kubenswrapper[4632]: I1201 06:58:29.794014 4632 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-94fdc9c94-ghwxr" podUID="64b038c3-a697-4612-a2a1-1701578beb7f" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.154:9311/healthcheck\": read tcp 10.217.0.2:57686->10.217.0.154:9311: read: connection reset by peer" Dec 01 06:58:29 crc kubenswrapper[4632]: I1201 06:58:29.794057 4632 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-94fdc9c94-ghwxr" podUID="64b038c3-a697-4612-a2a1-1701578beb7f" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.154:9311/healthcheck\": read tcp 10.217.0.2:57688->10.217.0.154:9311: read: connection reset by peer" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.208107 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-94fdc9c94-ghwxr" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.269451 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64b038c3-a697-4612-a2a1-1701578beb7f-logs\") pod \"64b038c3-a697-4612-a2a1-1701578beb7f\" (UID: \"64b038c3-a697-4612-a2a1-1701578beb7f\") " Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.269510 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64b038c3-a697-4612-a2a1-1701578beb7f-config-data\") pod \"64b038c3-a697-4612-a2a1-1701578beb7f\" (UID: \"64b038c3-a697-4612-a2a1-1701578beb7f\") " Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.269665 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64b038c3-a697-4612-a2a1-1701578beb7f-combined-ca-bundle\") pod \"64b038c3-a697-4612-a2a1-1701578beb7f\" (UID: \"64b038c3-a697-4612-a2a1-1701578beb7f\") " Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.269687 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d294m\" (UniqueName: \"kubernetes.io/projected/64b038c3-a697-4612-a2a1-1701578beb7f-kube-api-access-d294m\") pod \"64b038c3-a697-4612-a2a1-1701578beb7f\" (UID: \"64b038c3-a697-4612-a2a1-1701578beb7f\") " Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.269713 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/64b038c3-a697-4612-a2a1-1701578beb7f-config-data-custom\") pod \"64b038c3-a697-4612-a2a1-1701578beb7f\" (UID: \"64b038c3-a697-4612-a2a1-1701578beb7f\") " Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.270021 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64b038c3-a697-4612-a2a1-1701578beb7f-logs" (OuterVolumeSpecName: "logs") pod "64b038c3-a697-4612-a2a1-1701578beb7f" (UID: "64b038c3-a697-4612-a2a1-1701578beb7f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.270584 4632 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64b038c3-a697-4612-a2a1-1701578beb7f-logs\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.275651 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64b038c3-a697-4612-a2a1-1701578beb7f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "64b038c3-a697-4612-a2a1-1701578beb7f" (UID: "64b038c3-a697-4612-a2a1-1701578beb7f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.284700 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64b038c3-a697-4612-a2a1-1701578beb7f-kube-api-access-d294m" (OuterVolumeSpecName: "kube-api-access-d294m") pod "64b038c3-a697-4612-a2a1-1701578beb7f" (UID: "64b038c3-a697-4612-a2a1-1701578beb7f"). InnerVolumeSpecName "kube-api-access-d294m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.295498 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64b038c3-a697-4612-a2a1-1701578beb7f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "64b038c3-a697-4612-a2a1-1701578beb7f" (UID: "64b038c3-a697-4612-a2a1-1701578beb7f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.313424 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64b038c3-a697-4612-a2a1-1701578beb7f-config-data" (OuterVolumeSpecName: "config-data") pod "64b038c3-a697-4612-a2a1-1701578beb7f" (UID: "64b038c3-a697-4612-a2a1-1701578beb7f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.372476 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64b038c3-a697-4612-a2a1-1701578beb7f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.372514 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d294m\" (UniqueName: \"kubernetes.io/projected/64b038c3-a697-4612-a2a1-1701578beb7f-kube-api-access-d294m\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.372526 4632 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/64b038c3-a697-4612-a2a1-1701578beb7f-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.372536 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64b038c3-a697-4612-a2a1-1701578beb7f-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.452382 4632 generic.go:334] "Generic (PLEG): container finished" podID="64b038c3-a697-4612-a2a1-1701578beb7f" containerID="f57a2136be7ad2ce51bf3d1ebd9cbb88ce55f3e262eccec03b5728407cbeb17e" exitCode=0 Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.452596 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-94fdc9c94-ghwxr" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.453469 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-94fdc9c94-ghwxr" event={"ID":"64b038c3-a697-4612-a2a1-1701578beb7f","Type":"ContainerDied","Data":"f57a2136be7ad2ce51bf3d1ebd9cbb88ce55f3e262eccec03b5728407cbeb17e"} Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.453527 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-94fdc9c94-ghwxr" event={"ID":"64b038c3-a697-4612-a2a1-1701578beb7f","Type":"ContainerDied","Data":"d9b1e7c54bbdeb8246b151f4428c9891b7d244a21f55a12e72cea92c2b481c75"} Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.453560 4632 scope.go:117] "RemoveContainer" containerID="f57a2136be7ad2ce51bf3d1ebd9cbb88ce55f3e262eccec03b5728407cbeb17e" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.492785 4632 scope.go:117] "RemoveContainer" containerID="fa9cee183059808ff7b8044e7c1e24b5c1b526b9e506082744b5514e9273d715" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.497211 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-94fdc9c94-ghwxr"] Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.503285 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-94fdc9c94-ghwxr"] Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.527788 4632 scope.go:117] "RemoveContainer" containerID="f57a2136be7ad2ce51bf3d1ebd9cbb88ce55f3e262eccec03b5728407cbeb17e" Dec 01 06:58:30 crc kubenswrapper[4632]: E1201 06:58:30.528419 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f57a2136be7ad2ce51bf3d1ebd9cbb88ce55f3e262eccec03b5728407cbeb17e\": container with ID starting with f57a2136be7ad2ce51bf3d1ebd9cbb88ce55f3e262eccec03b5728407cbeb17e not found: ID does not exist" containerID="f57a2136be7ad2ce51bf3d1ebd9cbb88ce55f3e262eccec03b5728407cbeb17e" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.528464 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f57a2136be7ad2ce51bf3d1ebd9cbb88ce55f3e262eccec03b5728407cbeb17e"} err="failed to get container status \"f57a2136be7ad2ce51bf3d1ebd9cbb88ce55f3e262eccec03b5728407cbeb17e\": rpc error: code = NotFound desc = could not find container \"f57a2136be7ad2ce51bf3d1ebd9cbb88ce55f3e262eccec03b5728407cbeb17e\": container with ID starting with f57a2136be7ad2ce51bf3d1ebd9cbb88ce55f3e262eccec03b5728407cbeb17e not found: ID does not exist" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.528493 4632 scope.go:117] "RemoveContainer" containerID="fa9cee183059808ff7b8044e7c1e24b5c1b526b9e506082744b5514e9273d715" Dec 01 06:58:30 crc kubenswrapper[4632]: E1201 06:58:30.528821 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa9cee183059808ff7b8044e7c1e24b5c1b526b9e506082744b5514e9273d715\": container with ID starting with fa9cee183059808ff7b8044e7c1e24b5c1b526b9e506082744b5514e9273d715 not found: ID does not exist" containerID="fa9cee183059808ff7b8044e7c1e24b5c1b526b9e506082744b5514e9273d715" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.528867 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa9cee183059808ff7b8044e7c1e24b5c1b526b9e506082744b5514e9273d715"} err="failed to get container status 
\"fa9cee183059808ff7b8044e7c1e24b5c1b526b9e506082744b5514e9273d715\": rpc error: code = NotFound desc = could not find container \"fa9cee183059808ff7b8044e7c1e24b5c1b526b9e506082744b5514e9273d715\": container with ID starting with fa9cee183059808ff7b8044e7c1e24b5c1b526b9e506082744b5514e9273d715 not found: ID does not exist" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.766555 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64b038c3-a697-4612-a2a1-1701578beb7f" path="/var/lib/kubelet/pods/64b038c3-a697-4612-a2a1-1701578beb7f/volumes" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.770842 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-7q66w" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.890841 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6b176a21-27cf-4608-8787-e91a914be7cb-db-sync-config-data\") pod \"6b176a21-27cf-4608-8787-e91a914be7cb\" (UID: \"6b176a21-27cf-4608-8787-e91a914be7cb\") " Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.890990 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b176a21-27cf-4608-8787-e91a914be7cb-scripts\") pod \"6b176a21-27cf-4608-8787-e91a914be7cb\" (UID: \"6b176a21-27cf-4608-8787-e91a914be7cb\") " Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.891014 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6b176a21-27cf-4608-8787-e91a914be7cb-etc-machine-id\") pod \"6b176a21-27cf-4608-8787-e91a914be7cb\" (UID: \"6b176a21-27cf-4608-8787-e91a914be7cb\") " Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.891068 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6mxl\" (UniqueName: \"kubernetes.io/projected/6b176a21-27cf-4608-8787-e91a914be7cb-kube-api-access-z6mxl\") pod \"6b176a21-27cf-4608-8787-e91a914be7cb\" (UID: \"6b176a21-27cf-4608-8787-e91a914be7cb\") " Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.891109 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b176a21-27cf-4608-8787-e91a914be7cb-config-data\") pod \"6b176a21-27cf-4608-8787-e91a914be7cb\" (UID: \"6b176a21-27cf-4608-8787-e91a914be7cb\") " Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.891178 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b176a21-27cf-4608-8787-e91a914be7cb-combined-ca-bundle\") pod \"6b176a21-27cf-4608-8787-e91a914be7cb\" (UID: \"6b176a21-27cf-4608-8787-e91a914be7cb\") " Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.891698 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6b176a21-27cf-4608-8787-e91a914be7cb-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "6b176a21-27cf-4608-8787-e91a914be7cb" (UID: "6b176a21-27cf-4608-8787-e91a914be7cb"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.892102 4632 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6b176a21-27cf-4608-8787-e91a914be7cb-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.895522 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b176a21-27cf-4608-8787-e91a914be7cb-scripts" (OuterVolumeSpecName: "scripts") pod "6b176a21-27cf-4608-8787-e91a914be7cb" (UID: "6b176a21-27cf-4608-8787-e91a914be7cb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.895541 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b176a21-27cf-4608-8787-e91a914be7cb-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "6b176a21-27cf-4608-8787-e91a914be7cb" (UID: "6b176a21-27cf-4608-8787-e91a914be7cb"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.895609 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b176a21-27cf-4608-8787-e91a914be7cb-kube-api-access-z6mxl" (OuterVolumeSpecName: "kube-api-access-z6mxl") pod "6b176a21-27cf-4608-8787-e91a914be7cb" (UID: "6b176a21-27cf-4608-8787-e91a914be7cb"). InnerVolumeSpecName "kube-api-access-z6mxl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.913944 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b176a21-27cf-4608-8787-e91a914be7cb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6b176a21-27cf-4608-8787-e91a914be7cb" (UID: "6b176a21-27cf-4608-8787-e91a914be7cb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.936114 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b176a21-27cf-4608-8787-e91a914be7cb-config-data" (OuterVolumeSpecName: "config-data") pod "6b176a21-27cf-4608-8787-e91a914be7cb" (UID: "6b176a21-27cf-4608-8787-e91a914be7cb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.993097 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b176a21-27cf-4608-8787-e91a914be7cb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.993275 4632 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6b176a21-27cf-4608-8787-e91a914be7cb-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.993387 4632 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b176a21-27cf-4608-8787-e91a914be7cb-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.993473 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6mxl\" (UniqueName: \"kubernetes.io/projected/6b176a21-27cf-4608-8787-e91a914be7cb-kube-api-access-z6mxl\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:30 crc kubenswrapper[4632]: I1201 06:58:30.994699 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b176a21-27cf-4608-8787-e91a914be7cb-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.462364 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-7q66w" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.462316 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-7q66w" event={"ID":"6b176a21-27cf-4608-8787-e91a914be7cb","Type":"ContainerDied","Data":"55d1ec05cb7d6154b8e3d83453c6e3c80bbb34ecac0dfdb4976d92c4dffb54a7"} Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.463887 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="55d1ec05cb7d6154b8e3d83453c6e3c80bbb34ecac0dfdb4976d92c4dffb54a7" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.784925 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 06:58:31 crc kubenswrapper[4632]: E1201 06:58:31.785548 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04" containerName="extract-utilities" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.785630 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04" containerName="extract-utilities" Dec 01 06:58:31 crc kubenswrapper[4632]: E1201 06:58:31.785703 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04" containerName="registry-server" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.785759 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04" containerName="registry-server" Dec 01 06:58:31 crc kubenswrapper[4632]: E1201 06:58:31.785817 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64b038c3-a697-4612-a2a1-1701578beb7f" containerName="barbican-api" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.785870 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="64b038c3-a697-4612-a2a1-1701578beb7f" containerName="barbican-api" Dec 01 06:58:31 crc kubenswrapper[4632]: E1201 06:58:31.785922 4632 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="64b038c3-a697-4612-a2a1-1701578beb7f" containerName="barbican-api-log" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.785970 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="64b038c3-a697-4612-a2a1-1701578beb7f" containerName="barbican-api-log" Dec 01 06:58:31 crc kubenswrapper[4632]: E1201 06:58:31.786033 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77dc536d-ddc2-4490-909c-2f59f184d3ca" containerName="dnsmasq-dns" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.786078 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="77dc536d-ddc2-4490-909c-2f59f184d3ca" containerName="dnsmasq-dns" Dec 01 06:58:31 crc kubenswrapper[4632]: E1201 06:58:31.786125 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77dc536d-ddc2-4490-909c-2f59f184d3ca" containerName="init" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.786186 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="77dc536d-ddc2-4490-909c-2f59f184d3ca" containerName="init" Dec 01 06:58:31 crc kubenswrapper[4632]: E1201 06:58:31.786234 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b176a21-27cf-4608-8787-e91a914be7cb" containerName="cinder-db-sync" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.786275 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b176a21-27cf-4608-8787-e91a914be7cb" containerName="cinder-db-sync" Dec 01 06:58:31 crc kubenswrapper[4632]: E1201 06:58:31.786333 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04" containerName="extract-content" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.786463 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04" containerName="extract-content" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.786713 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="77dc536d-ddc2-4490-909c-2f59f184d3ca" containerName="dnsmasq-dns" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.786788 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="a21bbafa-4cfb-4dc7-8fc1-16c567a9bb04" containerName="registry-server" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.786839 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="64b038c3-a697-4612-a2a1-1701578beb7f" containerName="barbican-api-log" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.786898 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="64b038c3-a697-4612-a2a1-1701578beb7f" containerName="barbican-api" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.786955 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b176a21-27cf-4608-8787-e91a914be7cb" containerName="cinder-db-sync" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.787957 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.791219 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.791243 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-hz9v6" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.791225 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.791236 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.792930 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.837324 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-68c76996b9-8gcj7"] Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.847509 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.914557 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-ovsdbserver-nb\") pod \"dnsmasq-dns-68c76996b9-8gcj7\" (UID: \"e967624e-2612-4b89-ae6c-44b3b914b8ad\") " pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.921572 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-dns-swift-storage-0\") pod \"dnsmasq-dns-68c76996b9-8gcj7\" (UID: \"e967624e-2612-4b89-ae6c-44b3b914b8ad\") " pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.921758 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xqwt\" (UniqueName: \"kubernetes.io/projected/599bd62f-6f31-4255-b3b2-083faf500f85-kube-api-access-2xqwt\") pod \"cinder-scheduler-0\" (UID: \"599bd62f-6f31-4255-b3b2-083faf500f85\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.921879 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/599bd62f-6f31-4255-b3b2-083faf500f85-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"599bd62f-6f31-4255-b3b2-083faf500f85\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.921971 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/599bd62f-6f31-4255-b3b2-083faf500f85-config-data\") pod \"cinder-scheduler-0\" (UID: \"599bd62f-6f31-4255-b3b2-083faf500f85\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.922049 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7m6gb\" (UniqueName: \"kubernetes.io/projected/e967624e-2612-4b89-ae6c-44b3b914b8ad-kube-api-access-7m6gb\") pod \"dnsmasq-dns-68c76996b9-8gcj7\" 
(UID: \"e967624e-2612-4b89-ae6c-44b3b914b8ad\") " pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.922142 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/599bd62f-6f31-4255-b3b2-083faf500f85-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"599bd62f-6f31-4255-b3b2-083faf500f85\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.922254 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-ovsdbserver-sb\") pod \"dnsmasq-dns-68c76996b9-8gcj7\" (UID: \"e967624e-2612-4b89-ae6c-44b3b914b8ad\") " pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.922323 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/599bd62f-6f31-4255-b3b2-083faf500f85-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"599bd62f-6f31-4255-b3b2-083faf500f85\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.922483 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-dns-svc\") pod \"dnsmasq-dns-68c76996b9-8gcj7\" (UID: \"e967624e-2612-4b89-ae6c-44b3b914b8ad\") " pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.922560 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-config\") pod \"dnsmasq-dns-68c76996b9-8gcj7\" (UID: \"e967624e-2612-4b89-ae6c-44b3b914b8ad\") " pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.922681 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/599bd62f-6f31-4255-b3b2-083faf500f85-scripts\") pod \"cinder-scheduler-0\" (UID: \"599bd62f-6f31-4255-b3b2-083faf500f85\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:31 crc kubenswrapper[4632]: I1201 06:58:31.923241 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68c76996b9-8gcj7"] Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.032762 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-ovsdbserver-nb\") pod \"dnsmasq-dns-68c76996b9-8gcj7\" (UID: \"e967624e-2612-4b89-ae6c-44b3b914b8ad\") " pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.032816 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-dns-swift-storage-0\") pod \"dnsmasq-dns-68c76996b9-8gcj7\" (UID: \"e967624e-2612-4b89-ae6c-44b3b914b8ad\") " pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.032884 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-2xqwt\" (UniqueName: \"kubernetes.io/projected/599bd62f-6f31-4255-b3b2-083faf500f85-kube-api-access-2xqwt\") pod \"cinder-scheduler-0\" (UID: \"599bd62f-6f31-4255-b3b2-083faf500f85\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.032928 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/599bd62f-6f31-4255-b3b2-083faf500f85-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"599bd62f-6f31-4255-b3b2-083faf500f85\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.032963 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/599bd62f-6f31-4255-b3b2-083faf500f85-config-data\") pod \"cinder-scheduler-0\" (UID: \"599bd62f-6f31-4255-b3b2-083faf500f85\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.032983 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7m6gb\" (UniqueName: \"kubernetes.io/projected/e967624e-2612-4b89-ae6c-44b3b914b8ad-kube-api-access-7m6gb\") pod \"dnsmasq-dns-68c76996b9-8gcj7\" (UID: \"e967624e-2612-4b89-ae6c-44b3b914b8ad\") " pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.033019 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/599bd62f-6f31-4255-b3b2-083faf500f85-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"599bd62f-6f31-4255-b3b2-083faf500f85\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.033053 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-ovsdbserver-sb\") pod \"dnsmasq-dns-68c76996b9-8gcj7\" (UID: \"e967624e-2612-4b89-ae6c-44b3b914b8ad\") " pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.033076 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/599bd62f-6f31-4255-b3b2-083faf500f85-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"599bd62f-6f31-4255-b3b2-083faf500f85\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.033144 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-dns-svc\") pod \"dnsmasq-dns-68c76996b9-8gcj7\" (UID: \"e967624e-2612-4b89-ae6c-44b3b914b8ad\") " pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.033213 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-config\") pod \"dnsmasq-dns-68c76996b9-8gcj7\" (UID: \"e967624e-2612-4b89-ae6c-44b3b914b8ad\") " pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.033276 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/599bd62f-6f31-4255-b3b2-083faf500f85-scripts\") pod \"cinder-scheduler-0\" (UID: 
\"599bd62f-6f31-4255-b3b2-083faf500f85\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.035102 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-ovsdbserver-nb\") pod \"dnsmasq-dns-68c76996b9-8gcj7\" (UID: \"e967624e-2612-4b89-ae6c-44b3b914b8ad\") " pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.035379 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-dns-swift-storage-0\") pod \"dnsmasq-dns-68c76996b9-8gcj7\" (UID: \"e967624e-2612-4b89-ae6c-44b3b914b8ad\") " pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.035456 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/599bd62f-6f31-4255-b3b2-083faf500f85-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"599bd62f-6f31-4255-b3b2-083faf500f85\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.036257 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-ovsdbserver-sb\") pod \"dnsmasq-dns-68c76996b9-8gcj7\" (UID: \"e967624e-2612-4b89-ae6c-44b3b914b8ad\") " pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.036875 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-dns-svc\") pod \"dnsmasq-dns-68c76996b9-8gcj7\" (UID: \"e967624e-2612-4b89-ae6c-44b3b914b8ad\") " pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.037281 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-config\") pod \"dnsmasq-dns-68c76996b9-8gcj7\" (UID: \"e967624e-2612-4b89-ae6c-44b3b914b8ad\") " pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.041593 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/599bd62f-6f31-4255-b3b2-083faf500f85-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"599bd62f-6f31-4255-b3b2-083faf500f85\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.051251 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/599bd62f-6f31-4255-b3b2-083faf500f85-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"599bd62f-6f31-4255-b3b2-083faf500f85\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.053055 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/599bd62f-6f31-4255-b3b2-083faf500f85-scripts\") pod \"cinder-scheduler-0\" (UID: \"599bd62f-6f31-4255-b3b2-083faf500f85\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.059275 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-2xqwt\" (UniqueName: \"kubernetes.io/projected/599bd62f-6f31-4255-b3b2-083faf500f85-kube-api-access-2xqwt\") pod \"cinder-scheduler-0\" (UID: \"599bd62f-6f31-4255-b3b2-083faf500f85\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.063215 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/599bd62f-6f31-4255-b3b2-083faf500f85-config-data\") pod \"cinder-scheduler-0\" (UID: \"599bd62f-6f31-4255-b3b2-083faf500f85\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.065921 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7m6gb\" (UniqueName: \"kubernetes.io/projected/e967624e-2612-4b89-ae6c-44b3b914b8ad-kube-api-access-7m6gb\") pod \"dnsmasq-dns-68c76996b9-8gcj7\" (UID: \"e967624e-2612-4b89-ae6c-44b3b914b8ad\") " pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.066394 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.068034 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.070721 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.107619 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.117007 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.135388 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/94975672-ef3d-4fe8-a849-39f93ee9c123-etc-machine-id\") pod \"cinder-api-0\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") " pod="openstack/cinder-api-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.135436 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/94975672-ef3d-4fe8-a849-39f93ee9c123-config-data-custom\") pod \"cinder-api-0\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") " pod="openstack/cinder-api-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.135466 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94975672-ef3d-4fe8-a849-39f93ee9c123-scripts\") pod \"cinder-api-0\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") " pod="openstack/cinder-api-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.135638 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wfgnl\" (UniqueName: \"kubernetes.io/projected/94975672-ef3d-4fe8-a849-39f93ee9c123-kube-api-access-wfgnl\") pod \"cinder-api-0\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") " pod="openstack/cinder-api-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.135669 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94975672-ef3d-4fe8-a849-39f93ee9c123-logs\") pod 
\"cinder-api-0\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") " pod="openstack/cinder-api-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.135698 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94975672-ef3d-4fe8-a849-39f93ee9c123-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") " pod="openstack/cinder-api-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.135734 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94975672-ef3d-4fe8-a849-39f93ee9c123-config-data\") pod \"cinder-api-0\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") " pod="openstack/cinder-api-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.214591 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.237314 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/94975672-ef3d-4fe8-a849-39f93ee9c123-etc-machine-id\") pod \"cinder-api-0\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") " pod="openstack/cinder-api-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.237380 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/94975672-ef3d-4fe8-a849-39f93ee9c123-config-data-custom\") pod \"cinder-api-0\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") " pod="openstack/cinder-api-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.237420 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94975672-ef3d-4fe8-a849-39f93ee9c123-scripts\") pod \"cinder-api-0\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") " pod="openstack/cinder-api-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.237454 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/94975672-ef3d-4fe8-a849-39f93ee9c123-etc-machine-id\") pod \"cinder-api-0\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") " pod="openstack/cinder-api-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.237613 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wfgnl\" (UniqueName: \"kubernetes.io/projected/94975672-ef3d-4fe8-a849-39f93ee9c123-kube-api-access-wfgnl\") pod \"cinder-api-0\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") " pod="openstack/cinder-api-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.237647 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94975672-ef3d-4fe8-a849-39f93ee9c123-logs\") pod \"cinder-api-0\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") " pod="openstack/cinder-api-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.237681 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94975672-ef3d-4fe8-a849-39f93ee9c123-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") " pod="openstack/cinder-api-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 
06:58:32.237719 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94975672-ef3d-4fe8-a849-39f93ee9c123-config-data\") pod \"cinder-api-0\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") " pod="openstack/cinder-api-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.238337 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94975672-ef3d-4fe8-a849-39f93ee9c123-logs\") pod \"cinder-api-0\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") " pod="openstack/cinder-api-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.244046 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94975672-ef3d-4fe8-a849-39f93ee9c123-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") " pod="openstack/cinder-api-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.244171 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94975672-ef3d-4fe8-a849-39f93ee9c123-scripts\") pod \"cinder-api-0\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") " pod="openstack/cinder-api-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.244232 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/94975672-ef3d-4fe8-a849-39f93ee9c123-config-data-custom\") pod \"cinder-api-0\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") " pod="openstack/cinder-api-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.244549 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94975672-ef3d-4fe8-a849-39f93ee9c123-config-data\") pod \"cinder-api-0\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") " pod="openstack/cinder-api-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.266834 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wfgnl\" (UniqueName: \"kubernetes.io/projected/94975672-ef3d-4fe8-a849-39f93ee9c123-kube-api-access-wfgnl\") pod \"cinder-api-0\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") " pod="openstack/cinder-api-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.324372 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.480095 4632 generic.go:334] "Generic (PLEG): container finished" podID="979b2304-e919-4eba-8d31-392560a3960a" containerID="d0cc1061f53bf12326249897324c29a70a53d4386a7ef4c02658a787fbd13f69" exitCode=0 Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.480394 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-787fbf84bd-fhwb4" event={"ID":"979b2304-e919-4eba-8d31-392560a3960a","Type":"ContainerDied","Data":"d0cc1061f53bf12326249897324c29a70a53d4386a7ef4c02658a787fbd13f69"} Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.495017 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-787fbf84bd-fhwb4" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.546887 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/979b2304-e919-4eba-8d31-392560a3960a-httpd-config\") pod \"979b2304-e919-4eba-8d31-392560a3960a\" (UID: \"979b2304-e919-4eba-8d31-392560a3960a\") " Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.547066 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmdb4\" (UniqueName: \"kubernetes.io/projected/979b2304-e919-4eba-8d31-392560a3960a-kube-api-access-mmdb4\") pod \"979b2304-e919-4eba-8d31-392560a3960a\" (UID: \"979b2304-e919-4eba-8d31-392560a3960a\") " Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.547251 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/979b2304-e919-4eba-8d31-392560a3960a-ovndb-tls-certs\") pod \"979b2304-e919-4eba-8d31-392560a3960a\" (UID: \"979b2304-e919-4eba-8d31-392560a3960a\") " Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.547292 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/979b2304-e919-4eba-8d31-392560a3960a-config\") pod \"979b2304-e919-4eba-8d31-392560a3960a\" (UID: \"979b2304-e919-4eba-8d31-392560a3960a\") " Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.547411 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/979b2304-e919-4eba-8d31-392560a3960a-combined-ca-bundle\") pod \"979b2304-e919-4eba-8d31-392560a3960a\" (UID: \"979b2304-e919-4eba-8d31-392560a3960a\") " Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.552379 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/979b2304-e919-4eba-8d31-392560a3960a-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "979b2304-e919-4eba-8d31-392560a3960a" (UID: "979b2304-e919-4eba-8d31-392560a3960a"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.553531 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/979b2304-e919-4eba-8d31-392560a3960a-kube-api-access-mmdb4" (OuterVolumeSpecName: "kube-api-access-mmdb4") pod "979b2304-e919-4eba-8d31-392560a3960a" (UID: "979b2304-e919-4eba-8d31-392560a3960a"). InnerVolumeSpecName "kube-api-access-mmdb4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.607596 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/979b2304-e919-4eba-8d31-392560a3960a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "979b2304-e919-4eba-8d31-392560a3960a" (UID: "979b2304-e919-4eba-8d31-392560a3960a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.618409 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/979b2304-e919-4eba-8d31-392560a3960a-config" (OuterVolumeSpecName: "config") pod "979b2304-e919-4eba-8d31-392560a3960a" (UID: "979b2304-e919-4eba-8d31-392560a3960a"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.643583 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/979b2304-e919-4eba-8d31-392560a3960a-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "979b2304-e919-4eba-8d31-392560a3960a" (UID: "979b2304-e919-4eba-8d31-392560a3960a"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.650856 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/979b2304-e919-4eba-8d31-392560a3960a-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.650888 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/979b2304-e919-4eba-8d31-392560a3960a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.650898 4632 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/979b2304-e919-4eba-8d31-392560a3960a-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.650909 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mmdb4\" (UniqueName: \"kubernetes.io/projected/979b2304-e919-4eba-8d31-392560a3960a-kube-api-access-mmdb4\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.650921 4632 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/979b2304-e919-4eba-8d31-392560a3960a-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.669095 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.779120 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68c76996b9-8gcj7"] Dec 01 06:58:32 crc kubenswrapper[4632]: W1201 06:58:32.842434 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod94975672_ef3d_4fe8_a849_39f93ee9c123.slice/crio-fb7589ab4a4ca7f6b387bce58e8967f2823433546c4c18cb39a13b151448435b WatchSource:0}: Error finding container fb7589ab4a4ca7f6b387bce58e8967f2823433546c4c18cb39a13b151448435b: Status 404 returned error can't find the container with id fb7589ab4a4ca7f6b387bce58e8967f2823433546c4c18cb39a13b151448435b Dec 01 06:58:32 crc kubenswrapper[4632]: I1201 06:58:32.844776 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 01 06:58:33 crc kubenswrapper[4632]: I1201 06:58:33.502672 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-787fbf84bd-fhwb4" Dec 01 06:58:33 crc kubenswrapper[4632]: I1201 06:58:33.502661 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-787fbf84bd-fhwb4" event={"ID":"979b2304-e919-4eba-8d31-392560a3960a","Type":"ContainerDied","Data":"f2af2f4b7ffdcb58796cfeb92df91b8aa3553d61892a75e3a4e572cff4984686"} Dec 01 06:58:33 crc kubenswrapper[4632]: I1201 06:58:33.503239 4632 scope.go:117] "RemoveContainer" containerID="ae197b02fc7549c143e60ac010845904bad62578b7b8ecbf7fcdbbd81ff1207e" Dec 01 06:58:33 crc kubenswrapper[4632]: I1201 06:58:33.506534 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"94975672-ef3d-4fe8-a849-39f93ee9c123","Type":"ContainerStarted","Data":"e62200e62db1cb30e7b594ee807faa682379ef3eab7fd70cf58bb77ed5ad94d9"} Dec 01 06:58:33 crc kubenswrapper[4632]: I1201 06:58:33.506791 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"94975672-ef3d-4fe8-a849-39f93ee9c123","Type":"ContainerStarted","Data":"fb7589ab4a4ca7f6b387bce58e8967f2823433546c4c18cb39a13b151448435b"} Dec 01 06:58:33 crc kubenswrapper[4632]: I1201 06:58:33.508263 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"599bd62f-6f31-4255-b3b2-083faf500f85","Type":"ContainerStarted","Data":"9662e899e160397dbdd50f9172a9ec256bc2cc6220ab520ef8984380e9440117"} Dec 01 06:58:33 crc kubenswrapper[4632]: I1201 06:58:33.510864 4632 generic.go:334] "Generic (PLEG): container finished" podID="e967624e-2612-4b89-ae6c-44b3b914b8ad" containerID="2f34dfbadba39982327ab7a04ec4e447d819b45970de0d0813390375ff21dc59" exitCode=0 Dec 01 06:58:33 crc kubenswrapper[4632]: I1201 06:58:33.510900 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" event={"ID":"e967624e-2612-4b89-ae6c-44b3b914b8ad","Type":"ContainerDied","Data":"2f34dfbadba39982327ab7a04ec4e447d819b45970de0d0813390375ff21dc59"} Dec 01 06:58:33 crc kubenswrapper[4632]: I1201 06:58:33.510915 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" event={"ID":"e967624e-2612-4b89-ae6c-44b3b914b8ad","Type":"ContainerStarted","Data":"788af742cd5d2cab8011bb4b041935580276587cc0c5cc4338a684b5105cc6ff"} Dec 01 06:58:33 crc kubenswrapper[4632]: I1201 06:58:33.531695 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-787fbf84bd-fhwb4"] Dec 01 06:58:33 crc kubenswrapper[4632]: I1201 06:58:33.543576 4632 scope.go:117] "RemoveContainer" containerID="d0cc1061f53bf12326249897324c29a70a53d4386a7ef4c02658a787fbd13f69" Dec 01 06:58:33 crc kubenswrapper[4632]: I1201 06:58:33.546328 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-787fbf84bd-fhwb4"] Dec 01 06:58:34 crc kubenswrapper[4632]: I1201 06:58:34.218792 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 01 06:58:34 crc kubenswrapper[4632]: I1201 06:58:34.532033 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"599bd62f-6f31-4255-b3b2-083faf500f85","Type":"ContainerStarted","Data":"f7d7155698776281c0862f91ebf31b82b0a20c13d5112d734e92fa7e62bcb09a"} Dec 01 06:58:34 crc kubenswrapper[4632]: I1201 06:58:34.535627 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" 
event={"ID":"e967624e-2612-4b89-ae6c-44b3b914b8ad","Type":"ContainerStarted","Data":"c61d3f68f14062f7fa348ba55512c49e7689abfca2940d06ba9794902c35ecbd"} Dec 01 06:58:34 crc kubenswrapper[4632]: I1201 06:58:34.536078 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" Dec 01 06:58:34 crc kubenswrapper[4632]: I1201 06:58:34.556177 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"94975672-ef3d-4fe8-a849-39f93ee9c123","Type":"ContainerStarted","Data":"a55dd0e3ec4169de12f96804c73ec4ea73bf55653ae07503cef9b760714ae2fe"} Dec 01 06:58:34 crc kubenswrapper[4632]: I1201 06:58:34.556764 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 01 06:58:34 crc kubenswrapper[4632]: I1201 06:58:34.589071 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" podStartSLOduration=3.58905584 podStartE2EDuration="3.58905584s" podCreationTimestamp="2025-12-01 06:58:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:58:34.577820044 +0000 UTC m=+924.142833017" watchObservedRunningTime="2025-12-01 06:58:34.58905584 +0000 UTC m=+924.154068813" Dec 01 06:58:34 crc kubenswrapper[4632]: I1201 06:58:34.628595 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=2.628580202 podStartE2EDuration="2.628580202s" podCreationTimestamp="2025-12-01 06:58:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:58:34.606864656 +0000 UTC m=+924.171877649" watchObservedRunningTime="2025-12-01 06:58:34.628580202 +0000 UTC m=+924.193593175" Dec 01 06:58:34 crc kubenswrapper[4632]: I1201 06:58:34.790427 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="979b2304-e919-4eba-8d31-392560a3960a" path="/var/lib/kubelet/pods/979b2304-e919-4eba-8d31-392560a3960a/volumes" Dec 01 06:58:35 crc kubenswrapper[4632]: I1201 06:58:35.566952 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"599bd62f-6f31-4255-b3b2-083faf500f85","Type":"ContainerStarted","Data":"02fb4e95e5799b5c83df465859374f1cd5513409e0930616ae07479432a4707a"} Dec 01 06:58:35 crc kubenswrapper[4632]: I1201 06:58:35.567141 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="94975672-ef3d-4fe8-a849-39f93ee9c123" containerName="cinder-api-log" containerID="cri-o://e62200e62db1cb30e7b594ee807faa682379ef3eab7fd70cf58bb77ed5ad94d9" gracePeriod=30 Dec 01 06:58:35 crc kubenswrapper[4632]: I1201 06:58:35.567423 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="94975672-ef3d-4fe8-a849-39f93ee9c123" containerName="cinder-api" containerID="cri-o://a55dd0e3ec4169de12f96804c73ec4ea73bf55653ae07503cef9b760714ae2fe" gracePeriod=30 Dec 01 06:58:35 crc kubenswrapper[4632]: I1201 06:58:35.596674 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.716208202 podStartE2EDuration="4.596656928s" podCreationTimestamp="2025-12-01 06:58:31 +0000 UTC" firstStartedPulling="2025-12-01 06:58:32.673278238 +0000 UTC m=+922.238291211" lastFinishedPulling="2025-12-01 
Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.125747 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.232012 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wfgnl\" (UniqueName: \"kubernetes.io/projected/94975672-ef3d-4fe8-a849-39f93ee9c123-kube-api-access-wfgnl\") pod \"94975672-ef3d-4fe8-a849-39f93ee9c123\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") "
Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.232386 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94975672-ef3d-4fe8-a849-39f93ee9c123-logs\") pod \"94975672-ef3d-4fe8-a849-39f93ee9c123\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") "
Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.232413 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94975672-ef3d-4fe8-a849-39f93ee9c123-config-data\") pod \"94975672-ef3d-4fe8-a849-39f93ee9c123\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") "
Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.232448 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/94975672-ef3d-4fe8-a849-39f93ee9c123-config-data-custom\") pod \"94975672-ef3d-4fe8-a849-39f93ee9c123\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") "
Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.232518 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94975672-ef3d-4fe8-a849-39f93ee9c123-scripts\") pod \"94975672-ef3d-4fe8-a849-39f93ee9c123\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") "
Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.232585 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94975672-ef3d-4fe8-a849-39f93ee9c123-combined-ca-bundle\") pod \"94975672-ef3d-4fe8-a849-39f93ee9c123\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") "
Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.232697 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94975672-ef3d-4fe8-a849-39f93ee9c123-logs" (OuterVolumeSpecName: "logs") pod "94975672-ef3d-4fe8-a849-39f93ee9c123" (UID: "94975672-ef3d-4fe8-a849-39f93ee9c123"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.232715 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/94975672-ef3d-4fe8-a849-39f93ee9c123-etc-machine-id\") pod \"94975672-ef3d-4fe8-a849-39f93ee9c123\" (UID: \"94975672-ef3d-4fe8-a849-39f93ee9c123\") " Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.232749 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/94975672-ef3d-4fe8-a849-39f93ee9c123-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "94975672-ef3d-4fe8-a849-39f93ee9c123" (UID: "94975672-ef3d-4fe8-a849-39f93ee9c123"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.233620 4632 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94975672-ef3d-4fe8-a849-39f93ee9c123-logs\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.233647 4632 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/94975672-ef3d-4fe8-a849-39f93ee9c123-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.238242 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94975672-ef3d-4fe8-a849-39f93ee9c123-kube-api-access-wfgnl" (OuterVolumeSpecName: "kube-api-access-wfgnl") pod "94975672-ef3d-4fe8-a849-39f93ee9c123" (UID: "94975672-ef3d-4fe8-a849-39f93ee9c123"). InnerVolumeSpecName "kube-api-access-wfgnl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.238312 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94975672-ef3d-4fe8-a849-39f93ee9c123-scripts" (OuterVolumeSpecName: "scripts") pod "94975672-ef3d-4fe8-a849-39f93ee9c123" (UID: "94975672-ef3d-4fe8-a849-39f93ee9c123"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.238663 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94975672-ef3d-4fe8-a849-39f93ee9c123-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "94975672-ef3d-4fe8-a849-39f93ee9c123" (UID: "94975672-ef3d-4fe8-a849-39f93ee9c123"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.259286 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94975672-ef3d-4fe8-a849-39f93ee9c123-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "94975672-ef3d-4fe8-a849-39f93ee9c123" (UID: "94975672-ef3d-4fe8-a849-39f93ee9c123"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.276790 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94975672-ef3d-4fe8-a849-39f93ee9c123-config-data" (OuterVolumeSpecName: "config-data") pod "94975672-ef3d-4fe8-a849-39f93ee9c123" (UID: "94975672-ef3d-4fe8-a849-39f93ee9c123"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.335450 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wfgnl\" (UniqueName: \"kubernetes.io/projected/94975672-ef3d-4fe8-a849-39f93ee9c123-kube-api-access-wfgnl\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.335482 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94975672-ef3d-4fe8-a849-39f93ee9c123-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.335494 4632 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/94975672-ef3d-4fe8-a849-39f93ee9c123-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.335502 4632 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94975672-ef3d-4fe8-a849-39f93ee9c123-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.335510 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94975672-ef3d-4fe8-a849-39f93ee9c123-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.581721 4632 generic.go:334] "Generic (PLEG): container finished" podID="94975672-ef3d-4fe8-a849-39f93ee9c123" containerID="a55dd0e3ec4169de12f96804c73ec4ea73bf55653ae07503cef9b760714ae2fe" exitCode=0 Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.581763 4632 generic.go:334] "Generic (PLEG): container finished" podID="94975672-ef3d-4fe8-a849-39f93ee9c123" containerID="e62200e62db1cb30e7b594ee807faa682379ef3eab7fd70cf58bb77ed5ad94d9" exitCode=143 Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.581803 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.581838 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"94975672-ef3d-4fe8-a849-39f93ee9c123","Type":"ContainerDied","Data":"a55dd0e3ec4169de12f96804c73ec4ea73bf55653ae07503cef9b760714ae2fe"} Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.581908 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"94975672-ef3d-4fe8-a849-39f93ee9c123","Type":"ContainerDied","Data":"e62200e62db1cb30e7b594ee807faa682379ef3eab7fd70cf58bb77ed5ad94d9"} Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.581923 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"94975672-ef3d-4fe8-a849-39f93ee9c123","Type":"ContainerDied","Data":"fb7589ab4a4ca7f6b387bce58e8967f2823433546c4c18cb39a13b151448435b"} Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.581943 4632 scope.go:117] "RemoveContainer" containerID="a55dd0e3ec4169de12f96804c73ec4ea73bf55653ae07503cef9b760714ae2fe" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.610971 4632 scope.go:117] "RemoveContainer" containerID="e62200e62db1cb30e7b594ee807faa682379ef3eab7fd70cf58bb77ed5ad94d9" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.615592 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.621420 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.630806 4632 scope.go:117] "RemoveContainer" containerID="a55dd0e3ec4169de12f96804c73ec4ea73bf55653ae07503cef9b760714ae2fe" Dec 01 06:58:36 crc kubenswrapper[4632]: E1201 06:58:36.631176 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a55dd0e3ec4169de12f96804c73ec4ea73bf55653ae07503cef9b760714ae2fe\": container with ID starting with a55dd0e3ec4169de12f96804c73ec4ea73bf55653ae07503cef9b760714ae2fe not found: ID does not exist" containerID="a55dd0e3ec4169de12f96804c73ec4ea73bf55653ae07503cef9b760714ae2fe" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.631218 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a55dd0e3ec4169de12f96804c73ec4ea73bf55653ae07503cef9b760714ae2fe"} err="failed to get container status \"a55dd0e3ec4169de12f96804c73ec4ea73bf55653ae07503cef9b760714ae2fe\": rpc error: code = NotFound desc = could not find container \"a55dd0e3ec4169de12f96804c73ec4ea73bf55653ae07503cef9b760714ae2fe\": container with ID starting with a55dd0e3ec4169de12f96804c73ec4ea73bf55653ae07503cef9b760714ae2fe not found: ID does not exist" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.631247 4632 scope.go:117] "RemoveContainer" containerID="e62200e62db1cb30e7b594ee807faa682379ef3eab7fd70cf58bb77ed5ad94d9" Dec 01 06:58:36 crc kubenswrapper[4632]: E1201 06:58:36.632003 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e62200e62db1cb30e7b594ee807faa682379ef3eab7fd70cf58bb77ed5ad94d9\": container with ID starting with e62200e62db1cb30e7b594ee807faa682379ef3eab7fd70cf58bb77ed5ad94d9 not found: ID does not exist" containerID="e62200e62db1cb30e7b594ee807faa682379ef3eab7fd70cf58bb77ed5ad94d9" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.632038 4632 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e62200e62db1cb30e7b594ee807faa682379ef3eab7fd70cf58bb77ed5ad94d9"} err="failed to get container status \"e62200e62db1cb30e7b594ee807faa682379ef3eab7fd70cf58bb77ed5ad94d9\": rpc error: code = NotFound desc = could not find container \"e62200e62db1cb30e7b594ee807faa682379ef3eab7fd70cf58bb77ed5ad94d9\": container with ID starting with e62200e62db1cb30e7b594ee807faa682379ef3eab7fd70cf58bb77ed5ad94d9 not found: ID does not exist" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.632061 4632 scope.go:117] "RemoveContainer" containerID="a55dd0e3ec4169de12f96804c73ec4ea73bf55653ae07503cef9b760714ae2fe" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.632294 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a55dd0e3ec4169de12f96804c73ec4ea73bf55653ae07503cef9b760714ae2fe"} err="failed to get container status \"a55dd0e3ec4169de12f96804c73ec4ea73bf55653ae07503cef9b760714ae2fe\": rpc error: code = NotFound desc = could not find container \"a55dd0e3ec4169de12f96804c73ec4ea73bf55653ae07503cef9b760714ae2fe\": container with ID starting with a55dd0e3ec4169de12f96804c73ec4ea73bf55653ae07503cef9b760714ae2fe not found: ID does not exist" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.632317 4632 scope.go:117] "RemoveContainer" containerID="e62200e62db1cb30e7b594ee807faa682379ef3eab7fd70cf58bb77ed5ad94d9" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.632578 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e62200e62db1cb30e7b594ee807faa682379ef3eab7fd70cf58bb77ed5ad94d9"} err="failed to get container status \"e62200e62db1cb30e7b594ee807faa682379ef3eab7fd70cf58bb77ed5ad94d9\": rpc error: code = NotFound desc = could not find container \"e62200e62db1cb30e7b594ee807faa682379ef3eab7fd70cf58bb77ed5ad94d9\": container with ID starting with e62200e62db1cb30e7b594ee807faa682379ef3eab7fd70cf58bb77ed5ad94d9 not found: ID does not exist" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.639862 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 01 06:58:36 crc kubenswrapper[4632]: E1201 06:58:36.640261 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94975672-ef3d-4fe8-a849-39f93ee9c123" containerName="cinder-api-log" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.640283 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="94975672-ef3d-4fe8-a849-39f93ee9c123" containerName="cinder-api-log" Dec 01 06:58:36 crc kubenswrapper[4632]: E1201 06:58:36.640306 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94975672-ef3d-4fe8-a849-39f93ee9c123" containerName="cinder-api" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.640313 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="94975672-ef3d-4fe8-a849-39f93ee9c123" containerName="cinder-api" Dec 01 06:58:36 crc kubenswrapper[4632]: E1201 06:58:36.640329 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="979b2304-e919-4eba-8d31-392560a3960a" containerName="neutron-api" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.640335 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="979b2304-e919-4eba-8d31-392560a3960a" containerName="neutron-api" Dec 01 06:58:36 crc kubenswrapper[4632]: E1201 06:58:36.640344 4632 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="979b2304-e919-4eba-8d31-392560a3960a" containerName="neutron-httpd" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.640365 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="979b2304-e919-4eba-8d31-392560a3960a" containerName="neutron-httpd" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.640573 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="979b2304-e919-4eba-8d31-392560a3960a" containerName="neutron-api" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.640602 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="94975672-ef3d-4fe8-a849-39f93ee9c123" containerName="cinder-api-log" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.640614 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="979b2304-e919-4eba-8d31-392560a3960a" containerName="neutron-httpd" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.640621 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="94975672-ef3d-4fe8-a849-39f93ee9c123" containerName="cinder-api" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.641596 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.645250 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.646945 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.647325 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.660980 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.743927 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3-public-tls-certs\") pod \"cinder-api-0\" (UID: \"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3\") " pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.743993 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3-scripts\") pod \"cinder-api-0\" (UID: \"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3\") " pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.744033 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3\") " pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.744223 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3-config-data-custom\") pod \"cinder-api-0\" (UID: \"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3\") " pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.744313 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3\") " pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.744448 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wv595\" (UniqueName: \"kubernetes.io/projected/0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3-kube-api-access-wv595\") pod \"cinder-api-0\" (UID: \"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3\") " pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.744486 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3-config-data\") pod \"cinder-api-0\" (UID: \"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3\") " pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.744533 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3-logs\") pod \"cinder-api-0\" (UID: \"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3\") " pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.744752 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3\") " pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.759636 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94975672-ef3d-4fe8-a849-39f93ee9c123" path="/var/lib/kubelet/pods/94975672-ef3d-4fe8-a849-39f93ee9c123/volumes" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.847176 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3\") " pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.847270 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3-public-tls-certs\") pod \"cinder-api-0\" (UID: \"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3\") " pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.847393 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3-scripts\") pod \"cinder-api-0\" (UID: \"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3\") " pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.847461 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3\") " pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.847515 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" 
(UniqueName: \"kubernetes.io/secret/0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3-config-data-custom\") pod \"cinder-api-0\" (UID: \"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3\") " pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.847553 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3\") " pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.847616 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wv595\" (UniqueName: \"kubernetes.io/projected/0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3-kube-api-access-wv595\") pod \"cinder-api-0\" (UID: \"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3\") " pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.847645 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3-config-data\") pod \"cinder-api-0\" (UID: \"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3\") " pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.847686 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3-logs\") pod \"cinder-api-0\" (UID: \"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3\") " pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.847933 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3\") " pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.848316 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3-logs\") pod \"cinder-api-0\" (UID: \"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3\") " pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.852977 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3-scripts\") pod \"cinder-api-0\" (UID: \"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3\") " pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.853315 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3\") " pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.853437 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3-public-tls-certs\") pod \"cinder-api-0\" (UID: \"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3\") " pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.853714 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3\") " pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.857235 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3-config-data-custom\") pod \"cinder-api-0\" (UID: \"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3\") " pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.857910 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3-config-data\") pod \"cinder-api-0\" (UID: \"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3\") " pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.864391 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wv595\" (UniqueName: \"kubernetes.io/projected/0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3-kube-api-access-wv595\") pod \"cinder-api-0\" (UID: \"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3\") " pod="openstack/cinder-api-0" Dec 01 06:58:36 crc kubenswrapper[4632]: I1201 06:58:36.960563 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 01 06:58:37 crc kubenswrapper[4632]: I1201 06:58:37.117384 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 01 06:58:37 crc kubenswrapper[4632]: I1201 06:58:37.371990 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 01 06:58:37 crc kubenswrapper[4632]: W1201 06:58:37.377210 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0fe0cba0_4ef0_4e11_8f54_29fa4b4518b3.slice/crio-9d5617d335ea7cb7884c26fd943efaba9ad0e0388d79d4926067724c71456240 WatchSource:0}: Error finding container 9d5617d335ea7cb7884c26fd943efaba9ad0e0388d79d4926067724c71456240: Status 404 returned error can't find the container with id 9d5617d335ea7cb7884c26fd943efaba9ad0e0388d79d4926067724c71456240 Dec 01 06:58:37 crc kubenswrapper[4632]: I1201 06:58:37.593390 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3","Type":"ContainerStarted","Data":"9d5617d335ea7cb7884c26fd943efaba9ad0e0388d79d4926067724c71456240"} Dec 01 06:58:38 crc kubenswrapper[4632]: I1201 06:58:38.603761 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3","Type":"ContainerStarted","Data":"7724da4b9ccc1ab4920b1cc88cf1734c5014b16b4ca4c9f1c7d40a24b277cc82"} Dec 01 06:58:38 crc kubenswrapper[4632]: I1201 06:58:38.604104 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3","Type":"ContainerStarted","Data":"df3a113bc005df529390a0b5e8324aba387a016a8b4b032037fa7c9d4e441797"} Dec 01 06:58:38 crc kubenswrapper[4632]: I1201 06:58:38.604127 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 01 06:58:38 crc kubenswrapper[4632]: I1201 06:58:38.622236 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=2.622219847 
podStartE2EDuration="2.622219847s" podCreationTimestamp="2025-12-01 06:58:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:58:38.618350206 +0000 UTC m=+928.183363179" watchObservedRunningTime="2025-12-01 06:58:38.622219847 +0000 UTC m=+928.187232820" Dec 01 06:58:38 crc kubenswrapper[4632]: I1201 06:58:38.764221 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-5d8c896fc4-b4shb" Dec 01 06:58:38 crc kubenswrapper[4632]: I1201 06:58:38.969965 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 01 06:58:38 crc kubenswrapper[4632]: I1201 06:58:38.971240 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 01 06:58:38 crc kubenswrapper[4632]: I1201 06:58:38.973130 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 01 06:58:38 crc kubenswrapper[4632]: I1201 06:58:38.973654 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 01 06:58:38 crc kubenswrapper[4632]: I1201 06:58:38.977587 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-6cc4n" Dec 01 06:58:38 crc kubenswrapper[4632]: I1201 06:58:38.979970 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 01 06:58:39 crc kubenswrapper[4632]: I1201 06:58:39.004029 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88g5r\" (UniqueName: \"kubernetes.io/projected/c29a6239-304a-4a40-8e32-35dfb513bb8f-kube-api-access-88g5r\") pod \"openstackclient\" (UID: \"c29a6239-304a-4a40-8e32-35dfb513bb8f\") " pod="openstack/openstackclient" Dec 01 06:58:39 crc kubenswrapper[4632]: I1201 06:58:39.004180 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/c29a6239-304a-4a40-8e32-35dfb513bb8f-openstack-config\") pod \"openstackclient\" (UID: \"c29a6239-304a-4a40-8e32-35dfb513bb8f\") " pod="openstack/openstackclient" Dec 01 06:58:39 crc kubenswrapper[4632]: I1201 06:58:39.004302 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/c29a6239-304a-4a40-8e32-35dfb513bb8f-openstack-config-secret\") pod \"openstackclient\" (UID: \"c29a6239-304a-4a40-8e32-35dfb513bb8f\") " pod="openstack/openstackclient" Dec 01 06:58:39 crc kubenswrapper[4632]: I1201 06:58:39.004410 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c29a6239-304a-4a40-8e32-35dfb513bb8f-combined-ca-bundle\") pod \"openstackclient\" (UID: \"c29a6239-304a-4a40-8e32-35dfb513bb8f\") " pod="openstack/openstackclient" Dec 01 06:58:39 crc kubenswrapper[4632]: I1201 06:58:39.106311 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88g5r\" (UniqueName: \"kubernetes.io/projected/c29a6239-304a-4a40-8e32-35dfb513bb8f-kube-api-access-88g5r\") pod \"openstackclient\" (UID: \"c29a6239-304a-4a40-8e32-35dfb513bb8f\") " pod="openstack/openstackclient" Dec 01 06:58:39 crc kubenswrapper[4632]: I1201 06:58:39.106634 4632 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/c29a6239-304a-4a40-8e32-35dfb513bb8f-openstack-config\") pod \"openstackclient\" (UID: \"c29a6239-304a-4a40-8e32-35dfb513bb8f\") " pod="openstack/openstackclient" Dec 01 06:58:39 crc kubenswrapper[4632]: I1201 06:58:39.106695 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/c29a6239-304a-4a40-8e32-35dfb513bb8f-openstack-config-secret\") pod \"openstackclient\" (UID: \"c29a6239-304a-4a40-8e32-35dfb513bb8f\") " pod="openstack/openstackclient" Dec 01 06:58:39 crc kubenswrapper[4632]: I1201 06:58:39.106733 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c29a6239-304a-4a40-8e32-35dfb513bb8f-combined-ca-bundle\") pod \"openstackclient\" (UID: \"c29a6239-304a-4a40-8e32-35dfb513bb8f\") " pod="openstack/openstackclient" Dec 01 06:58:39 crc kubenswrapper[4632]: I1201 06:58:39.107525 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/c29a6239-304a-4a40-8e32-35dfb513bb8f-openstack-config\") pod \"openstackclient\" (UID: \"c29a6239-304a-4a40-8e32-35dfb513bb8f\") " pod="openstack/openstackclient" Dec 01 06:58:39 crc kubenswrapper[4632]: I1201 06:58:39.111293 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/c29a6239-304a-4a40-8e32-35dfb513bb8f-openstack-config-secret\") pod \"openstackclient\" (UID: \"c29a6239-304a-4a40-8e32-35dfb513bb8f\") " pod="openstack/openstackclient" Dec 01 06:58:39 crc kubenswrapper[4632]: I1201 06:58:39.112006 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c29a6239-304a-4a40-8e32-35dfb513bb8f-combined-ca-bundle\") pod \"openstackclient\" (UID: \"c29a6239-304a-4a40-8e32-35dfb513bb8f\") " pod="openstack/openstackclient" Dec 01 06:58:39 crc kubenswrapper[4632]: I1201 06:58:39.121494 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88g5r\" (UniqueName: \"kubernetes.io/projected/c29a6239-304a-4a40-8e32-35dfb513bb8f-kube-api-access-88g5r\") pod \"openstackclient\" (UID: \"c29a6239-304a-4a40-8e32-35dfb513bb8f\") " pod="openstack/openstackclient" Dec 01 06:58:39 crc kubenswrapper[4632]: I1201 06:58:39.287199 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 01 06:58:39 crc kubenswrapper[4632]: I1201 06:58:39.691341 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 01 06:58:39 crc kubenswrapper[4632]: W1201 06:58:39.698033 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc29a6239_304a_4a40_8e32_35dfb513bb8f.slice/crio-8d46db3a6f27e59eec911ad74723e04ee34dd06cb6947017d042fb223f7172cf WatchSource:0}: Error finding container 8d46db3a6f27e59eec911ad74723e04ee34dd06cb6947017d042fb223f7172cf: Status 404 returned error can't find the container with id 8d46db3a6f27e59eec911ad74723e04ee34dd06cb6947017d042fb223f7172cf Dec 01 06:58:40 crc kubenswrapper[4632]: I1201 06:58:40.623966 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"c29a6239-304a-4a40-8e32-35dfb513bb8f","Type":"ContainerStarted","Data":"8d46db3a6f27e59eec911ad74723e04ee34dd06cb6947017d042fb223f7172cf"} Dec 01 06:58:41 crc kubenswrapper[4632]: I1201 06:58:41.916394 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-7697b7f499-t6njt"] Dec 01 06:58:41 crc kubenswrapper[4632]: I1201 06:58:41.918344 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:41 crc kubenswrapper[4632]: I1201 06:58:41.925756 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 01 06:58:41 crc kubenswrapper[4632]: I1201 06:58:41.925993 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 01 06:58:41 crc kubenswrapper[4632]: I1201 06:58:41.926124 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 01 06:58:41 crc kubenswrapper[4632]: I1201 06:58:41.927740 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-7697b7f499-t6njt"] Dec 01 06:58:41 crc kubenswrapper[4632]: I1201 06:58:41.970474 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e090ffa7-e9ce-46e9-97e8-8e38155d9241-internal-tls-certs\") pod \"swift-proxy-7697b7f499-t6njt\" (UID: \"e090ffa7-e9ce-46e9-97e8-8e38155d9241\") " pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:41 crc kubenswrapper[4632]: I1201 06:58:41.970517 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e090ffa7-e9ce-46e9-97e8-8e38155d9241-log-httpd\") pod \"swift-proxy-7697b7f499-t6njt\" (UID: \"e090ffa7-e9ce-46e9-97e8-8e38155d9241\") " pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:41 crc kubenswrapper[4632]: I1201 06:58:41.970546 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdzsx\" (UniqueName: \"kubernetes.io/projected/e090ffa7-e9ce-46e9-97e8-8e38155d9241-kube-api-access-fdzsx\") pod \"swift-proxy-7697b7f499-t6njt\" (UID: \"e090ffa7-e9ce-46e9-97e8-8e38155d9241\") " pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:41 crc kubenswrapper[4632]: I1201 06:58:41.970569 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/e090ffa7-e9ce-46e9-97e8-8e38155d9241-combined-ca-bundle\") pod \"swift-proxy-7697b7f499-t6njt\" (UID: \"e090ffa7-e9ce-46e9-97e8-8e38155d9241\") " pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:41 crc kubenswrapper[4632]: I1201 06:58:41.970599 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e090ffa7-e9ce-46e9-97e8-8e38155d9241-config-data\") pod \"swift-proxy-7697b7f499-t6njt\" (UID: \"e090ffa7-e9ce-46e9-97e8-8e38155d9241\") " pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:41 crc kubenswrapper[4632]: I1201 06:58:41.970692 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e090ffa7-e9ce-46e9-97e8-8e38155d9241-etc-swift\") pod \"swift-proxy-7697b7f499-t6njt\" (UID: \"e090ffa7-e9ce-46e9-97e8-8e38155d9241\") " pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:41 crc kubenswrapper[4632]: I1201 06:58:41.970750 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e090ffa7-e9ce-46e9-97e8-8e38155d9241-public-tls-certs\") pod \"swift-proxy-7697b7f499-t6njt\" (UID: \"e090ffa7-e9ce-46e9-97e8-8e38155d9241\") " pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:41 crc kubenswrapper[4632]: I1201 06:58:41.971086 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e090ffa7-e9ce-46e9-97e8-8e38155d9241-run-httpd\") pod \"swift-proxy-7697b7f499-t6njt\" (UID: \"e090ffa7-e9ce-46e9-97e8-8e38155d9241\") " pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.073174 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e090ffa7-e9ce-46e9-97e8-8e38155d9241-internal-tls-certs\") pod \"swift-proxy-7697b7f499-t6njt\" (UID: \"e090ffa7-e9ce-46e9-97e8-8e38155d9241\") " pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.073278 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e090ffa7-e9ce-46e9-97e8-8e38155d9241-log-httpd\") pod \"swift-proxy-7697b7f499-t6njt\" (UID: \"e090ffa7-e9ce-46e9-97e8-8e38155d9241\") " pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.073313 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdzsx\" (UniqueName: \"kubernetes.io/projected/e090ffa7-e9ce-46e9-97e8-8e38155d9241-kube-api-access-fdzsx\") pod \"swift-proxy-7697b7f499-t6njt\" (UID: \"e090ffa7-e9ce-46e9-97e8-8e38155d9241\") " pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.073346 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e090ffa7-e9ce-46e9-97e8-8e38155d9241-combined-ca-bundle\") pod \"swift-proxy-7697b7f499-t6njt\" (UID: \"e090ffa7-e9ce-46e9-97e8-8e38155d9241\") " pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.073399 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/e090ffa7-e9ce-46e9-97e8-8e38155d9241-config-data\") pod \"swift-proxy-7697b7f499-t6njt\" (UID: \"e090ffa7-e9ce-46e9-97e8-8e38155d9241\") " pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.073498 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e090ffa7-e9ce-46e9-97e8-8e38155d9241-etc-swift\") pod \"swift-proxy-7697b7f499-t6njt\" (UID: \"e090ffa7-e9ce-46e9-97e8-8e38155d9241\") " pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.073560 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e090ffa7-e9ce-46e9-97e8-8e38155d9241-public-tls-certs\") pod \"swift-proxy-7697b7f499-t6njt\" (UID: \"e090ffa7-e9ce-46e9-97e8-8e38155d9241\") " pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.073781 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e090ffa7-e9ce-46e9-97e8-8e38155d9241-run-httpd\") pod \"swift-proxy-7697b7f499-t6njt\" (UID: \"e090ffa7-e9ce-46e9-97e8-8e38155d9241\") " pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.073828 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e090ffa7-e9ce-46e9-97e8-8e38155d9241-log-httpd\") pod \"swift-proxy-7697b7f499-t6njt\" (UID: \"e090ffa7-e9ce-46e9-97e8-8e38155d9241\") " pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.074271 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e090ffa7-e9ce-46e9-97e8-8e38155d9241-run-httpd\") pod \"swift-proxy-7697b7f499-t6njt\" (UID: \"e090ffa7-e9ce-46e9-97e8-8e38155d9241\") " pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.081086 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e090ffa7-e9ce-46e9-97e8-8e38155d9241-combined-ca-bundle\") pod \"swift-proxy-7697b7f499-t6njt\" (UID: \"e090ffa7-e9ce-46e9-97e8-8e38155d9241\") " pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.083814 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e090ffa7-e9ce-46e9-97e8-8e38155d9241-internal-tls-certs\") pod \"swift-proxy-7697b7f499-t6njt\" (UID: \"e090ffa7-e9ce-46e9-97e8-8e38155d9241\") " pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.089229 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e090ffa7-e9ce-46e9-97e8-8e38155d9241-public-tls-certs\") pod \"swift-proxy-7697b7f499-t6njt\" (UID: \"e090ffa7-e9ce-46e9-97e8-8e38155d9241\") " pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.089249 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e090ffa7-e9ce-46e9-97e8-8e38155d9241-etc-swift\") pod \"swift-proxy-7697b7f499-t6njt\" (UID: 
\"e090ffa7-e9ce-46e9-97e8-8e38155d9241\") " pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.116172 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e090ffa7-e9ce-46e9-97e8-8e38155d9241-config-data\") pod \"swift-proxy-7697b7f499-t6njt\" (UID: \"e090ffa7-e9ce-46e9-97e8-8e38155d9241\") " pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.120057 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdzsx\" (UniqueName: \"kubernetes.io/projected/e090ffa7-e9ce-46e9-97e8-8e38155d9241-kube-api-access-fdzsx\") pod \"swift-proxy-7697b7f499-t6njt\" (UID: \"e090ffa7-e9ce-46e9-97e8-8e38155d9241\") " pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.215540 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.249304 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.272562 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56d8484fc-ddw26"] Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.272785 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-56d8484fc-ddw26" podUID="9acc4130-8f32-48b6-8174-2062cefef802" containerName="dnsmasq-dns" containerID="cri-o://055b28008e0b9548da1f2d6efdde8a10be6215cb1aee42a2d6539007ca8c8e33" gracePeriod=10 Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.354249 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.427933 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.505923 4632 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-56d8484fc-ddw26" podUID="9acc4130-8f32-48b6-8174-2062cefef802" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.152:5353: connect: connection refused" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.653815 4632 generic.go:334] "Generic (PLEG): container finished" podID="9acc4130-8f32-48b6-8174-2062cefef802" containerID="055b28008e0b9548da1f2d6efdde8a10be6215cb1aee42a2d6539007ca8c8e33" exitCode=0 Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.654069 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56d8484fc-ddw26" event={"ID":"9acc4130-8f32-48b6-8174-2062cefef802","Type":"ContainerDied","Data":"055b28008e0b9548da1f2d6efdde8a10be6215cb1aee42a2d6539007ca8c8e33"} Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.654258 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="599bd62f-6f31-4255-b3b2-083faf500f85" containerName="cinder-scheduler" containerID="cri-o://f7d7155698776281c0862f91ebf31b82b0a20c13d5112d734e92fa7e62bcb09a" gracePeriod=30 Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.654662 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="599bd62f-6f31-4255-b3b2-083faf500f85" 
containerName="probe" containerID="cri-o://02fb4e95e5799b5c83df465859374f1cd5513409e0930616ae07479432a4707a" gracePeriod=30 Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.802689 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56d8484fc-ddw26" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.846556 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-7697b7f499-t6njt"] Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.893302 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xdcqb\" (UniqueName: \"kubernetes.io/projected/9acc4130-8f32-48b6-8174-2062cefef802-kube-api-access-xdcqb\") pod \"9acc4130-8f32-48b6-8174-2062cefef802\" (UID: \"9acc4130-8f32-48b6-8174-2062cefef802\") " Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.893401 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-dns-swift-storage-0\") pod \"9acc4130-8f32-48b6-8174-2062cefef802\" (UID: \"9acc4130-8f32-48b6-8174-2062cefef802\") " Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.893423 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-ovsdbserver-sb\") pod \"9acc4130-8f32-48b6-8174-2062cefef802\" (UID: \"9acc4130-8f32-48b6-8174-2062cefef802\") " Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.893596 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-ovsdbserver-nb\") pod \"9acc4130-8f32-48b6-8174-2062cefef802\" (UID: \"9acc4130-8f32-48b6-8174-2062cefef802\") " Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.893723 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-dns-svc\") pod \"9acc4130-8f32-48b6-8174-2062cefef802\" (UID: \"9acc4130-8f32-48b6-8174-2062cefef802\") " Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.893774 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-config\") pod \"9acc4130-8f32-48b6-8174-2062cefef802\" (UID: \"9acc4130-8f32-48b6-8174-2062cefef802\") " Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.910151 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9acc4130-8f32-48b6-8174-2062cefef802-kube-api-access-xdcqb" (OuterVolumeSpecName: "kube-api-access-xdcqb") pod "9acc4130-8f32-48b6-8174-2062cefef802" (UID: "9acc4130-8f32-48b6-8174-2062cefef802"). InnerVolumeSpecName "kube-api-access-xdcqb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.936828 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9acc4130-8f32-48b6-8174-2062cefef802" (UID: "9acc4130-8f32-48b6-8174-2062cefef802"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.941483 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-config" (OuterVolumeSpecName: "config") pod "9acc4130-8f32-48b6-8174-2062cefef802" (UID: "9acc4130-8f32-48b6-8174-2062cefef802"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.945347 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "9acc4130-8f32-48b6-8174-2062cefef802" (UID: "9acc4130-8f32-48b6-8174-2062cefef802"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.946990 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9acc4130-8f32-48b6-8174-2062cefef802" (UID: "9acc4130-8f32-48b6-8174-2062cefef802"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.956589 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9acc4130-8f32-48b6-8174-2062cefef802" (UID: "9acc4130-8f32-48b6-8174-2062cefef802"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.995926 4632 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.995948 4632 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.995958 4632 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.995968 4632 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.995979 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9acc4130-8f32-48b6-8174-2062cefef802-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:42 crc kubenswrapper[4632]: I1201 06:58:42.995989 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xdcqb\" (UniqueName: \"kubernetes.io/projected/9acc4130-8f32-48b6-8174-2062cefef802-kube-api-access-xdcqb\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.149027 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/ceilometer-0"] Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.149711 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bc95ee19-cd3d-4f92-accc-637656c66069" containerName="proxy-httpd" containerID="cri-o://32f96c15774221a41c2df09c91676d6d46c543a5f33919dc5b1b406fb17955d1" gracePeriod=30 Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.149942 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bc95ee19-cd3d-4f92-accc-637656c66069" containerName="sg-core" containerID="cri-o://38cf06f25f88b7e076b9d55523a097805f2822bdc72082b0e53d13c68912eab5" gracePeriod=30 Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.149987 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bc95ee19-cd3d-4f92-accc-637656c66069" containerName="ceilometer-notification-agent" containerID="cri-o://70cc31822087780048289b3347fd49b9c67f99a57d77e793113b7a65ccd2cb7a" gracePeriod=30 Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.150203 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bc95ee19-cd3d-4f92-accc-637656c66069" containerName="ceilometer-central-agent" containerID="cri-o://ec1190518c8b14a6ab20aeb38738331506887ada8ae059bd234e5473a7b03a1c" gracePeriod=30 Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.175091 4632 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="bc95ee19-cd3d-4f92-accc-637656c66069" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.156:3000/\": EOF" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.664426 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56d8484fc-ddw26" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.664447 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56d8484fc-ddw26" event={"ID":"9acc4130-8f32-48b6-8174-2062cefef802","Type":"ContainerDied","Data":"3e1f93637b675ccd248669f621423eddf33d07ea762782a73eee4ee9df38aabc"} Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.664516 4632 scope.go:117] "RemoveContainer" containerID="055b28008e0b9548da1f2d6efdde8a10be6215cb1aee42a2d6539007ca8c8e33" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.667283 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7697b7f499-t6njt" event={"ID":"e090ffa7-e9ce-46e9-97e8-8e38155d9241","Type":"ContainerStarted","Data":"f68231ec8ec86d3cd7f161cb04b3ef960066e14550e1028023b8891eb83b67e1"} Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.667344 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7697b7f499-t6njt" event={"ID":"e090ffa7-e9ce-46e9-97e8-8e38155d9241","Type":"ContainerStarted","Data":"ed5a628eb0ddc6543f7ed7a9ffbeb58121aa24f1fdd1bc3ee9430ed880f8105c"} Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.667394 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-7697b7f499-t6njt" event={"ID":"e090ffa7-e9ce-46e9-97e8-8e38155d9241","Type":"ContainerStarted","Data":"fae5bef429de08494997bd06405c4dfb49cd238f72b29acc910b23bda1b166b0"} Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.667524 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.667564 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-7697b7f499-t6njt" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.673864 4632 generic.go:334] "Generic (PLEG): container finished" podID="599bd62f-6f31-4255-b3b2-083faf500f85" containerID="02fb4e95e5799b5c83df465859374f1cd5513409e0930616ae07479432a4707a" exitCode=0 Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.673937 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"599bd62f-6f31-4255-b3b2-083faf500f85","Type":"ContainerDied","Data":"02fb4e95e5799b5c83df465859374f1cd5513409e0930616ae07479432a4707a"} Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.680450 4632 generic.go:334] "Generic (PLEG): container finished" podID="bc95ee19-cd3d-4f92-accc-637656c66069" containerID="32f96c15774221a41c2df09c91676d6d46c543a5f33919dc5b1b406fb17955d1" exitCode=0 Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.680483 4632 generic.go:334] "Generic (PLEG): container finished" podID="bc95ee19-cd3d-4f92-accc-637656c66069" containerID="38cf06f25f88b7e076b9d55523a097805f2822bdc72082b0e53d13c68912eab5" exitCode=2 Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.680493 4632 generic.go:334] "Generic (PLEG): container finished" podID="bc95ee19-cd3d-4f92-accc-637656c66069" containerID="ec1190518c8b14a6ab20aeb38738331506887ada8ae059bd234e5473a7b03a1c" exitCode=0 Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.680517 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bc95ee19-cd3d-4f92-accc-637656c66069","Type":"ContainerDied","Data":"32f96c15774221a41c2df09c91676d6d46c543a5f33919dc5b1b406fb17955d1"} Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.680557 4632 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bc95ee19-cd3d-4f92-accc-637656c66069","Type":"ContainerDied","Data":"38cf06f25f88b7e076b9d55523a097805f2822bdc72082b0e53d13c68912eab5"} Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.680568 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bc95ee19-cd3d-4f92-accc-637656c66069","Type":"ContainerDied","Data":"ec1190518c8b14a6ab20aeb38738331506887ada8ae059bd234e5473a7b03a1c"} Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.688964 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-fnpks"] Dec 01 06:58:43 crc kubenswrapper[4632]: E1201 06:58:43.689339 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9acc4130-8f32-48b6-8174-2062cefef802" containerName="init" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.689374 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="9acc4130-8f32-48b6-8174-2062cefef802" containerName="init" Dec 01 06:58:43 crc kubenswrapper[4632]: E1201 06:58:43.689407 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9acc4130-8f32-48b6-8174-2062cefef802" containerName="dnsmasq-dns" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.689416 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="9acc4130-8f32-48b6-8174-2062cefef802" containerName="dnsmasq-dns" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.689599 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="9acc4130-8f32-48b6-8174-2062cefef802" containerName="dnsmasq-dns" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.690322 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-fnpks" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.693513 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-7697b7f499-t6njt" podStartSLOduration=2.693495248 podStartE2EDuration="2.693495248s" podCreationTimestamp="2025-12-01 06:58:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:58:43.689275117 +0000 UTC m=+933.254288100" watchObservedRunningTime="2025-12-01 06:58:43.693495248 +0000 UTC m=+933.258508222" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.698520 4632 scope.go:117] "RemoveContainer" containerID="e5c9e1965713c2b81e566d85c34e435aaa7dbe1b7ee89afb533aac5dd51d51b2" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.724015 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56d8484fc-ddw26"] Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.731770 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-fnpks"] Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.739079 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-56d8484fc-ddw26"] Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.819231 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7xlw\" (UniqueName: \"kubernetes.io/projected/484f5acc-9309-4561-8055-1fc5df33e183-kube-api-access-f7xlw\") pod \"nova-api-db-create-fnpks\" (UID: \"484f5acc-9309-4561-8055-1fc5df33e183\") " pod="openstack/nova-api-db-create-fnpks" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.819319 4632 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/484f5acc-9309-4561-8055-1fc5df33e183-operator-scripts\") pod \"nova-api-db-create-fnpks\" (UID: \"484f5acc-9309-4561-8055-1fc5df33e183\") " pod="openstack/nova-api-db-create-fnpks" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.820223 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-46f1-account-create-update-5zv72"] Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.822298 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-46f1-account-create-update-5zv72" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.824334 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.831381 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-pmhkj"] Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.833327 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-pmhkj" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.855940 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-pmhkj"] Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.881163 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-46f1-account-create-update-5zv72"] Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.917536 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-x94rl"] Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.918918 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-x94rl" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.921025 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rkln\" (UniqueName: \"kubernetes.io/projected/fbecd90a-c90a-4fe8-a349-7701b7256955-kube-api-access-8rkln\") pod \"nova-cell0-db-create-pmhkj\" (UID: \"fbecd90a-c90a-4fe8-a349-7701b7256955\") " pod="openstack/nova-cell0-db-create-pmhkj" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.921136 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fbecd90a-c90a-4fe8-a349-7701b7256955-operator-scripts\") pod \"nova-cell0-db-create-pmhkj\" (UID: \"fbecd90a-c90a-4fe8-a349-7701b7256955\") " pod="openstack/nova-cell0-db-create-pmhkj" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.921191 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7xlw\" (UniqueName: \"kubernetes.io/projected/484f5acc-9309-4561-8055-1fc5df33e183-kube-api-access-f7xlw\") pod \"nova-api-db-create-fnpks\" (UID: \"484f5acc-9309-4561-8055-1fc5df33e183\") " pod="openstack/nova-api-db-create-fnpks" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.921238 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b7cf908d-5642-4d23-9874-d4b7b1d3f323-operator-scripts\") pod \"nova-api-46f1-account-create-update-5zv72\" (UID: \"b7cf908d-5642-4d23-9874-d4b7b1d3f323\") " pod="openstack/nova-api-46f1-account-create-update-5zv72" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.921287 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w74st\" (UniqueName: \"kubernetes.io/projected/b7cf908d-5642-4d23-9874-d4b7b1d3f323-kube-api-access-w74st\") pod \"nova-api-46f1-account-create-update-5zv72\" (UID: \"b7cf908d-5642-4d23-9874-d4b7b1d3f323\") " pod="openstack/nova-api-46f1-account-create-update-5zv72" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.921312 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/484f5acc-9309-4561-8055-1fc5df33e183-operator-scripts\") pod \"nova-api-db-create-fnpks\" (UID: \"484f5acc-9309-4561-8055-1fc5df33e183\") " pod="openstack/nova-api-db-create-fnpks" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.921920 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/484f5acc-9309-4561-8055-1fc5df33e183-operator-scripts\") pod \"nova-api-db-create-fnpks\" (UID: \"484f5acc-9309-4561-8055-1fc5df33e183\") " pod="openstack/nova-api-db-create-fnpks" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.926123 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-x94rl"] Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.942056 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7xlw\" (UniqueName: \"kubernetes.io/projected/484f5acc-9309-4561-8055-1fc5df33e183-kube-api-access-f7xlw\") pod \"nova-api-db-create-fnpks\" (UID: \"484f5acc-9309-4561-8055-1fc5df33e183\") " pod="openstack/nova-api-db-create-fnpks" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.990005 4632 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-05bc-account-create-update-58k9r"] Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.993113 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-05bc-account-create-update-58k9r" Dec 01 06:58:43 crc kubenswrapper[4632]: I1201 06:58:43.995709 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.000704 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-05bc-account-create-update-58k9r"] Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.010292 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-fnpks" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.023942 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w74st\" (UniqueName: \"kubernetes.io/projected/b7cf908d-5642-4d23-9874-d4b7b1d3f323-kube-api-access-w74st\") pod \"nova-api-46f1-account-create-update-5zv72\" (UID: \"b7cf908d-5642-4d23-9874-d4b7b1d3f323\") " pod="openstack/nova-api-46f1-account-create-update-5zv72" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.023998 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rkln\" (UniqueName: \"kubernetes.io/projected/fbecd90a-c90a-4fe8-a349-7701b7256955-kube-api-access-8rkln\") pod \"nova-cell0-db-create-pmhkj\" (UID: \"fbecd90a-c90a-4fe8-a349-7701b7256955\") " pod="openstack/nova-cell0-db-create-pmhkj" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.024044 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80322b90-d8de-4544-8962-3761a3d13e03-operator-scripts\") pod \"nova-cell1-db-create-x94rl\" (UID: \"80322b90-d8de-4544-8962-3761a3d13e03\") " pod="openstack/nova-cell1-db-create-x94rl" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.024093 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wsxjc\" (UniqueName: \"kubernetes.io/projected/80322b90-d8de-4544-8962-3761a3d13e03-kube-api-access-wsxjc\") pod \"nova-cell1-db-create-x94rl\" (UID: \"80322b90-d8de-4544-8962-3761a3d13e03\") " pod="openstack/nova-cell1-db-create-x94rl" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.024169 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fbecd90a-c90a-4fe8-a349-7701b7256955-operator-scripts\") pod \"nova-cell0-db-create-pmhkj\" (UID: \"fbecd90a-c90a-4fe8-a349-7701b7256955\") " pod="openstack/nova-cell0-db-create-pmhkj" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.024260 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b7cf908d-5642-4d23-9874-d4b7b1d3f323-operator-scripts\") pod \"nova-api-46f1-account-create-update-5zv72\" (UID: \"b7cf908d-5642-4d23-9874-d4b7b1d3f323\") " pod="openstack/nova-api-46f1-account-create-update-5zv72" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.024876 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b7cf908d-5642-4d23-9874-d4b7b1d3f323-operator-scripts\") pod 
\"nova-api-46f1-account-create-update-5zv72\" (UID: \"b7cf908d-5642-4d23-9874-d4b7b1d3f323\") " pod="openstack/nova-api-46f1-account-create-update-5zv72" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.026002 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fbecd90a-c90a-4fe8-a349-7701b7256955-operator-scripts\") pod \"nova-cell0-db-create-pmhkj\" (UID: \"fbecd90a-c90a-4fe8-a349-7701b7256955\") " pod="openstack/nova-cell0-db-create-pmhkj" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.038664 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rkln\" (UniqueName: \"kubernetes.io/projected/fbecd90a-c90a-4fe8-a349-7701b7256955-kube-api-access-8rkln\") pod \"nova-cell0-db-create-pmhkj\" (UID: \"fbecd90a-c90a-4fe8-a349-7701b7256955\") " pod="openstack/nova-cell0-db-create-pmhkj" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.039097 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w74st\" (UniqueName: \"kubernetes.io/projected/b7cf908d-5642-4d23-9874-d4b7b1d3f323-kube-api-access-w74st\") pod \"nova-api-46f1-account-create-update-5zv72\" (UID: \"b7cf908d-5642-4d23-9874-d4b7b1d3f323\") " pod="openstack/nova-api-46f1-account-create-update-5zv72" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.126302 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80322b90-d8de-4544-8962-3761a3d13e03-operator-scripts\") pod \"nova-cell1-db-create-x94rl\" (UID: \"80322b90-d8de-4544-8962-3761a3d13e03\") " pod="openstack/nova-cell1-db-create-x94rl" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.126385 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wsxjc\" (UniqueName: \"kubernetes.io/projected/80322b90-d8de-4544-8962-3761a3d13e03-kube-api-access-wsxjc\") pod \"nova-cell1-db-create-x94rl\" (UID: \"80322b90-d8de-4544-8962-3761a3d13e03\") " pod="openstack/nova-cell1-db-create-x94rl" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.126460 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-792pd\" (UniqueName: \"kubernetes.io/projected/e705f257-798d-43b0-985c-bf3499b2c720-kube-api-access-792pd\") pod \"nova-cell0-05bc-account-create-update-58k9r\" (UID: \"e705f257-798d-43b0-985c-bf3499b2c720\") " pod="openstack/nova-cell0-05bc-account-create-update-58k9r" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.126501 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e705f257-798d-43b0-985c-bf3499b2c720-operator-scripts\") pod \"nova-cell0-05bc-account-create-update-58k9r\" (UID: \"e705f257-798d-43b0-985c-bf3499b2c720\") " pod="openstack/nova-cell0-05bc-account-create-update-58k9r" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.127144 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80322b90-d8de-4544-8962-3761a3d13e03-operator-scripts\") pod \"nova-cell1-db-create-x94rl\" (UID: \"80322b90-d8de-4544-8962-3761a3d13e03\") " pod="openstack/nova-cell1-db-create-x94rl" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.147471 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-wsxjc\" (UniqueName: \"kubernetes.io/projected/80322b90-d8de-4544-8962-3761a3d13e03-kube-api-access-wsxjc\") pod \"nova-cell1-db-create-x94rl\" (UID: \"80322b90-d8de-4544-8962-3761a3d13e03\") " pod="openstack/nova-cell1-db-create-x94rl" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.158113 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-46f1-account-create-update-5zv72" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.187666 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-pmhkj" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.221392 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-05f8-account-create-update-wr5n9"] Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.222762 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-05f8-account-create-update-wr5n9" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.226159 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.228890 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-792pd\" (UniqueName: \"kubernetes.io/projected/e705f257-798d-43b0-985c-bf3499b2c720-kube-api-access-792pd\") pod \"nova-cell0-05bc-account-create-update-58k9r\" (UID: \"e705f257-798d-43b0-985c-bf3499b2c720\") " pod="openstack/nova-cell0-05bc-account-create-update-58k9r" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.228925 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e705f257-798d-43b0-985c-bf3499b2c720-operator-scripts\") pod \"nova-cell0-05bc-account-create-update-58k9r\" (UID: \"e705f257-798d-43b0-985c-bf3499b2c720\") " pod="openstack/nova-cell0-05bc-account-create-update-58k9r" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.229598 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e705f257-798d-43b0-985c-bf3499b2c720-operator-scripts\") pod \"nova-cell0-05bc-account-create-update-58k9r\" (UID: \"e705f257-798d-43b0-985c-bf3499b2c720\") " pod="openstack/nova-cell0-05bc-account-create-update-58k9r" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.235751 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-05f8-account-create-update-wr5n9"] Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.261794 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-x94rl" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.276905 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-792pd\" (UniqueName: \"kubernetes.io/projected/e705f257-798d-43b0-985c-bf3499b2c720-kube-api-access-792pd\") pod \"nova-cell0-05bc-account-create-update-58k9r\" (UID: \"e705f257-798d-43b0-985c-bf3499b2c720\") " pod="openstack/nova-cell0-05bc-account-create-update-58k9r" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.325760 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-05bc-account-create-update-58k9r" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.334716 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/227dea0e-053e-4c4e-a209-5b6ad4f9145f-operator-scripts\") pod \"nova-cell1-05f8-account-create-update-wr5n9\" (UID: \"227dea0e-053e-4c4e-a209-5b6ad4f9145f\") " pod="openstack/nova-cell1-05f8-account-create-update-wr5n9" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.334953 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctmcv\" (UniqueName: \"kubernetes.io/projected/227dea0e-053e-4c4e-a209-5b6ad4f9145f-kube-api-access-ctmcv\") pod \"nova-cell1-05f8-account-create-update-wr5n9\" (UID: \"227dea0e-053e-4c4e-a209-5b6ad4f9145f\") " pod="openstack/nova-cell1-05f8-account-create-update-wr5n9" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.438500 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/227dea0e-053e-4c4e-a209-5b6ad4f9145f-operator-scripts\") pod \"nova-cell1-05f8-account-create-update-wr5n9\" (UID: \"227dea0e-053e-4c4e-a209-5b6ad4f9145f\") " pod="openstack/nova-cell1-05f8-account-create-update-wr5n9" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.438685 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctmcv\" (UniqueName: \"kubernetes.io/projected/227dea0e-053e-4c4e-a209-5b6ad4f9145f-kube-api-access-ctmcv\") pod \"nova-cell1-05f8-account-create-update-wr5n9\" (UID: \"227dea0e-053e-4c4e-a209-5b6ad4f9145f\") " pod="openstack/nova-cell1-05f8-account-create-update-wr5n9" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.439688 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/227dea0e-053e-4c4e-a209-5b6ad4f9145f-operator-scripts\") pod \"nova-cell1-05f8-account-create-update-wr5n9\" (UID: \"227dea0e-053e-4c4e-a209-5b6ad4f9145f\") " pod="openstack/nova-cell1-05f8-account-create-update-wr5n9" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.463887 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctmcv\" (UniqueName: \"kubernetes.io/projected/227dea0e-053e-4c4e-a209-5b6ad4f9145f-kube-api-access-ctmcv\") pod \"nova-cell1-05f8-account-create-update-wr5n9\" (UID: \"227dea0e-053e-4c4e-a209-5b6ad4f9145f\") " pod="openstack/nova-cell1-05f8-account-create-update-wr5n9" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.627488 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-fnpks"] Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.694647 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-05f8-account-create-update-wr5n9" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.695387 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-fnpks" event={"ID":"484f5acc-9309-4561-8055-1fc5df33e183","Type":"ContainerStarted","Data":"29f3ab514fb03c6998d98094f5f929b356563fb29b8a664143f9a64cea921bf4"} Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.763034 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9acc4130-8f32-48b6-8174-2062cefef802" path="/var/lib/kubelet/pods/9acc4130-8f32-48b6-8174-2062cefef802/volumes" Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.902045 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-pmhkj"] Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.986104 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-x94rl"] Dec 01 06:58:44 crc kubenswrapper[4632]: I1201 06:58:44.995071 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-05bc-account-create-update-58k9r"] Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.002100 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-46f1-account-create-update-5zv72"] Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.227925 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-05f8-account-create-update-wr5n9"] Dec 01 06:58:45 crc kubenswrapper[4632]: W1201 06:58:45.249405 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod227dea0e_053e_4c4e_a209_5b6ad4f9145f.slice/crio-1224969288c61eea15731183d65d4d0c699edef5a286f7380e3e18af99c9e3fe WatchSource:0}: Error finding container 1224969288c61eea15731183d65d4d0c699edef5a286f7380e3e18af99c9e3fe: Status 404 returned error can't find the container with id 1224969288c61eea15731183d65d4d0c699edef5a286f7380e3e18af99c9e3fe Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.383807 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.471722 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2xqwt\" (UniqueName: \"kubernetes.io/projected/599bd62f-6f31-4255-b3b2-083faf500f85-kube-api-access-2xqwt\") pod \"599bd62f-6f31-4255-b3b2-083faf500f85\" (UID: \"599bd62f-6f31-4255-b3b2-083faf500f85\") " Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.471864 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/599bd62f-6f31-4255-b3b2-083faf500f85-scripts\") pod \"599bd62f-6f31-4255-b3b2-083faf500f85\" (UID: \"599bd62f-6f31-4255-b3b2-083faf500f85\") " Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.471890 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/599bd62f-6f31-4255-b3b2-083faf500f85-etc-machine-id\") pod \"599bd62f-6f31-4255-b3b2-083faf500f85\" (UID: \"599bd62f-6f31-4255-b3b2-083faf500f85\") " Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.471989 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/599bd62f-6f31-4255-b3b2-083faf500f85-config-data\") pod \"599bd62f-6f31-4255-b3b2-083faf500f85\" (UID: \"599bd62f-6f31-4255-b3b2-083faf500f85\") " Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.472023 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/599bd62f-6f31-4255-b3b2-083faf500f85-config-data-custom\") pod \"599bd62f-6f31-4255-b3b2-083faf500f85\" (UID: \"599bd62f-6f31-4255-b3b2-083faf500f85\") " Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.472080 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/599bd62f-6f31-4255-b3b2-083faf500f85-combined-ca-bundle\") pod \"599bd62f-6f31-4255-b3b2-083faf500f85\" (UID: \"599bd62f-6f31-4255-b3b2-083faf500f85\") " Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.474464 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/599bd62f-6f31-4255-b3b2-083faf500f85-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "599bd62f-6f31-4255-b3b2-083faf500f85" (UID: "599bd62f-6f31-4255-b3b2-083faf500f85"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.489970 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/599bd62f-6f31-4255-b3b2-083faf500f85-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "599bd62f-6f31-4255-b3b2-083faf500f85" (UID: "599bd62f-6f31-4255-b3b2-083faf500f85"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.490476 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/599bd62f-6f31-4255-b3b2-083faf500f85-scripts" (OuterVolumeSpecName: "scripts") pod "599bd62f-6f31-4255-b3b2-083faf500f85" (UID: "599bd62f-6f31-4255-b3b2-083faf500f85"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.494320 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/599bd62f-6f31-4255-b3b2-083faf500f85-kube-api-access-2xqwt" (OuterVolumeSpecName: "kube-api-access-2xqwt") pod "599bd62f-6f31-4255-b3b2-083faf500f85" (UID: "599bd62f-6f31-4255-b3b2-083faf500f85"). InnerVolumeSpecName "kube-api-access-2xqwt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.575313 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2xqwt\" (UniqueName: \"kubernetes.io/projected/599bd62f-6f31-4255-b3b2-083faf500f85-kube-api-access-2xqwt\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.575343 4632 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/599bd62f-6f31-4255-b3b2-083faf500f85-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.575368 4632 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/599bd62f-6f31-4255-b3b2-083faf500f85-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.575377 4632 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/599bd62f-6f31-4255-b3b2-083faf500f85-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.609278 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/599bd62f-6f31-4255-b3b2-083faf500f85-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "599bd62f-6f31-4255-b3b2-083faf500f85" (UID: "599bd62f-6f31-4255-b3b2-083faf500f85"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.636468 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/599bd62f-6f31-4255-b3b2-083faf500f85-config-data" (OuterVolumeSpecName: "config-data") pod "599bd62f-6f31-4255-b3b2-083faf500f85" (UID: "599bd62f-6f31-4255-b3b2-083faf500f85"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.677755 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/599bd62f-6f31-4255-b3b2-083faf500f85-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.677783 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/599bd62f-6f31-4255-b3b2-083faf500f85-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.725633 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-46f1-account-create-update-5zv72" event={"ID":"b7cf908d-5642-4d23-9874-d4b7b1d3f323","Type":"ContainerStarted","Data":"db63ecd323b208a59bae25a8381222ab52945ec5c6a583c001bcd6c7f2d37fde"} Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.725691 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-46f1-account-create-update-5zv72" event={"ID":"b7cf908d-5642-4d23-9874-d4b7b1d3f323","Type":"ContainerStarted","Data":"02e4941b7ee69bc992b0a8be06b8d54df60a6955034394542055cface56dc1d0"} Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.736957 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-05f8-account-create-update-wr5n9" event={"ID":"227dea0e-053e-4c4e-a209-5b6ad4f9145f","Type":"ContainerStarted","Data":"3b2c01c075f3736e44a5a7538a150d0f10e59fd5da228d13e67c52ef1427ee7c"} Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.736994 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-05f8-account-create-update-wr5n9" event={"ID":"227dea0e-053e-4c4e-a209-5b6ad4f9145f","Type":"ContainerStarted","Data":"1224969288c61eea15731183d65d4d0c699edef5a286f7380e3e18af99c9e3fe"} Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.752506 4632 generic.go:334] "Generic (PLEG): container finished" podID="599bd62f-6f31-4255-b3b2-083faf500f85" containerID="f7d7155698776281c0862f91ebf31b82b0a20c13d5112d734e92fa7e62bcb09a" exitCode=0 Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.752589 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.752611 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"599bd62f-6f31-4255-b3b2-083faf500f85","Type":"ContainerDied","Data":"f7d7155698776281c0862f91ebf31b82b0a20c13d5112d734e92fa7e62bcb09a"} Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.752912 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"599bd62f-6f31-4255-b3b2-083faf500f85","Type":"ContainerDied","Data":"9662e899e160397dbdd50f9172a9ec256bc2cc6220ab520ef8984380e9440117"} Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.752937 4632 scope.go:117] "RemoveContainer" containerID="02fb4e95e5799b5c83df465859374f1cd5513409e0930616ae07479432a4707a" Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.756313 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-05bc-account-create-update-58k9r" event={"ID":"e705f257-798d-43b0-985c-bf3499b2c720","Type":"ContainerStarted","Data":"5217b31c470a73a8dc7553d7244bbee4649cf93862343b9d4e3ba4aa6a4f45f5"} Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.756345 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-05bc-account-create-update-58k9r" event={"ID":"e705f257-798d-43b0-985c-bf3499b2c720","Type":"ContainerStarted","Data":"22c2714ad31a3e086f40fda3f828b86f92246fa336314db6423d4dd6cfa0a5e5"} Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.756529 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-46f1-account-create-update-5zv72" podStartSLOduration=2.756504941 podStartE2EDuration="2.756504941s" podCreationTimestamp="2025-12-01 06:58:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:58:45.747785653 +0000 UTC m=+935.312798626" watchObservedRunningTime="2025-12-01 06:58:45.756504941 +0000 UTC m=+935.321517914" Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.767299 4632 generic.go:334] "Generic (PLEG): container finished" podID="484f5acc-9309-4561-8055-1fc5df33e183" containerID="72a98ac041315821beeacd8b6aed3148c7431cb17a040fd888d59cca93023c1b" exitCode=0 Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.767408 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-fnpks" event={"ID":"484f5acc-9309-4561-8055-1fc5df33e183","Type":"ContainerDied","Data":"72a98ac041315821beeacd8b6aed3148c7431cb17a040fd888d59cca93023c1b"} Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.772871 4632 generic.go:334] "Generic (PLEG): container finished" podID="fbecd90a-c90a-4fe8-a349-7701b7256955" containerID="ca6343fb56d2fd7a8d303446038bc39577fcc8c4e652137e785a17f43f86ec49" exitCode=0 Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.772934 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-pmhkj" event={"ID":"fbecd90a-c90a-4fe8-a349-7701b7256955","Type":"ContainerDied","Data":"ca6343fb56d2fd7a8d303446038bc39577fcc8c4e652137e785a17f43f86ec49"} Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.772954 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-pmhkj" event={"ID":"fbecd90a-c90a-4fe8-a349-7701b7256955","Type":"ContainerStarted","Data":"843b75a5ff8ffec84ee07d73b4018920a40451567d48816d0740f1b3c039e63f"} Dec 01 06:58:45 crc 
kubenswrapper[4632]: I1201 06:58:45.776850 4632 scope.go:117] "RemoveContainer" containerID="f7d7155698776281c0862f91ebf31b82b0a20c13d5112d734e92fa7e62bcb09a" Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.777691 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-05f8-account-create-update-wr5n9" podStartSLOduration=1.777675329 podStartE2EDuration="1.777675329s" podCreationTimestamp="2025-12-01 06:58:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:58:45.762417806 +0000 UTC m=+935.327430780" watchObservedRunningTime="2025-12-01 06:58:45.777675329 +0000 UTC m=+935.342688302" Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.782284 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-x94rl" event={"ID":"80322b90-d8de-4544-8962-3761a3d13e03","Type":"ContainerStarted","Data":"c408ae4834ffcbb51601338cd6039690e9278fb564e1d9926a1d5c4150f2883b"} Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.782313 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-x94rl" event={"ID":"80322b90-d8de-4544-8962-3761a3d13e03","Type":"ContainerStarted","Data":"fe328eacb6766bc84d552d9da198e0f786c5fe2896dd1477e0ecf5d3c7aeac5e"} Dec 01 06:58:45 crc kubenswrapper[4632]: I1201 06:58:45.789377 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-05bc-account-create-update-58k9r" podStartSLOduration=2.789366223 podStartE2EDuration="2.789366223s" podCreationTimestamp="2025-12-01 06:58:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:58:45.775476792 +0000 UTC m=+935.340489765" watchObservedRunningTime="2025-12-01 06:58:45.789366223 +0000 UTC m=+935.354379197" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.165410 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.182486 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.183178 4632 scope.go:117] "RemoveContainer" containerID="02fb4e95e5799b5c83df465859374f1cd5513409e0930616ae07479432a4707a" Dec 01 06:58:46 crc kubenswrapper[4632]: E1201 06:58:46.191986 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02fb4e95e5799b5c83df465859374f1cd5513409e0930616ae07479432a4707a\": container with ID starting with 02fb4e95e5799b5c83df465859374f1cd5513409e0930616ae07479432a4707a not found: ID does not exist" containerID="02fb4e95e5799b5c83df465859374f1cd5513409e0930616ae07479432a4707a" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.192029 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02fb4e95e5799b5c83df465859374f1cd5513409e0930616ae07479432a4707a"} err="failed to get container status \"02fb4e95e5799b5c83df465859374f1cd5513409e0930616ae07479432a4707a\": rpc error: code = NotFound desc = could not find container \"02fb4e95e5799b5c83df465859374f1cd5513409e0930616ae07479432a4707a\": container with ID starting with 02fb4e95e5799b5c83df465859374f1cd5513409e0930616ae07479432a4707a not found: ID does not exist" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 
06:58:46.192055 4632 scope.go:117] "RemoveContainer" containerID="f7d7155698776281c0862f91ebf31b82b0a20c13d5112d734e92fa7e62bcb09a" Dec 01 06:58:46 crc kubenswrapper[4632]: E1201 06:58:46.195506 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7d7155698776281c0862f91ebf31b82b0a20c13d5112d734e92fa7e62bcb09a\": container with ID starting with f7d7155698776281c0862f91ebf31b82b0a20c13d5112d734e92fa7e62bcb09a not found: ID does not exist" containerID="f7d7155698776281c0862f91ebf31b82b0a20c13d5112d734e92fa7e62bcb09a" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.195540 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7d7155698776281c0862f91ebf31b82b0a20c13d5112d734e92fa7e62bcb09a"} err="failed to get container status \"f7d7155698776281c0862f91ebf31b82b0a20c13d5112d734e92fa7e62bcb09a\": rpc error: code = NotFound desc = could not find container \"f7d7155698776281c0862f91ebf31b82b0a20c13d5112d734e92fa7e62bcb09a\": container with ID starting with f7d7155698776281c0862f91ebf31b82b0a20c13d5112d734e92fa7e62bcb09a not found: ID does not exist" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.211132 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 06:58:46 crc kubenswrapper[4632]: E1201 06:58:46.211763 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="599bd62f-6f31-4255-b3b2-083faf500f85" containerName="cinder-scheduler" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.211789 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="599bd62f-6f31-4255-b3b2-083faf500f85" containerName="cinder-scheduler" Dec 01 06:58:46 crc kubenswrapper[4632]: E1201 06:58:46.211809 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="599bd62f-6f31-4255-b3b2-083faf500f85" containerName="probe" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.211817 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="599bd62f-6f31-4255-b3b2-083faf500f85" containerName="probe" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.212019 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="599bd62f-6f31-4255-b3b2-083faf500f85" containerName="cinder-scheduler" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.212042 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="599bd62f-6f31-4255-b3b2-083faf500f85" containerName="probe" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.213202 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.218371 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.230454 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.297820 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08847455-e239-4d88-ba2d-0e17255fcaa3-config-data\") pod \"cinder-scheduler-0\" (UID: \"08847455-e239-4d88-ba2d-0e17255fcaa3\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.298119 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/08847455-e239-4d88-ba2d-0e17255fcaa3-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"08847455-e239-4d88-ba2d-0e17255fcaa3\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.298156 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/08847455-e239-4d88-ba2d-0e17255fcaa3-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"08847455-e239-4d88-ba2d-0e17255fcaa3\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.298204 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gq8rz\" (UniqueName: \"kubernetes.io/projected/08847455-e239-4d88-ba2d-0e17255fcaa3-kube-api-access-gq8rz\") pod \"cinder-scheduler-0\" (UID: \"08847455-e239-4d88-ba2d-0e17255fcaa3\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.298471 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08847455-e239-4d88-ba2d-0e17255fcaa3-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"08847455-e239-4d88-ba2d-0e17255fcaa3\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.298615 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08847455-e239-4d88-ba2d-0e17255fcaa3-scripts\") pod \"cinder-scheduler-0\" (UID: \"08847455-e239-4d88-ba2d-0e17255fcaa3\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.321303 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.399956 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bc95ee19-cd3d-4f92-accc-637656c66069-sg-core-conf-yaml\") pod \"bc95ee19-cd3d-4f92-accc-637656c66069\" (UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.400013 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc95ee19-cd3d-4f92-accc-637656c66069-config-data\") pod \"bc95ee19-cd3d-4f92-accc-637656c66069\" (UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.400061 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bc95ee19-cd3d-4f92-accc-637656c66069-log-httpd\") pod \"bc95ee19-cd3d-4f92-accc-637656c66069\" (UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.400156 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cwgzr\" (UniqueName: \"kubernetes.io/projected/bc95ee19-cd3d-4f92-accc-637656c66069-kube-api-access-cwgzr\") pod \"bc95ee19-cd3d-4f92-accc-637656c66069\" (UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.400254 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bc95ee19-cd3d-4f92-accc-637656c66069-run-httpd\") pod \"bc95ee19-cd3d-4f92-accc-637656c66069\" (UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.400294 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bc95ee19-cd3d-4f92-accc-637656c66069-scripts\") pod \"bc95ee19-cd3d-4f92-accc-637656c66069\" (UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.400448 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc95ee19-cd3d-4f92-accc-637656c66069-combined-ca-bundle\") pod \"bc95ee19-cd3d-4f92-accc-637656c66069\" (UID: \"bc95ee19-cd3d-4f92-accc-637656c66069\") " Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.400714 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc95ee19-cd3d-4f92-accc-637656c66069-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "bc95ee19-cd3d-4f92-accc-637656c66069" (UID: "bc95ee19-cd3d-4f92-accc-637656c66069"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.400841 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc95ee19-cd3d-4f92-accc-637656c66069-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "bc95ee19-cd3d-4f92-accc-637656c66069" (UID: "bc95ee19-cd3d-4f92-accc-637656c66069"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.400969 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08847455-e239-4d88-ba2d-0e17255fcaa3-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"08847455-e239-4d88-ba2d-0e17255fcaa3\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.401070 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08847455-e239-4d88-ba2d-0e17255fcaa3-scripts\") pod \"cinder-scheduler-0\" (UID: \"08847455-e239-4d88-ba2d-0e17255fcaa3\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.401170 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08847455-e239-4d88-ba2d-0e17255fcaa3-config-data\") pod \"cinder-scheduler-0\" (UID: \"08847455-e239-4d88-ba2d-0e17255fcaa3\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.401386 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/08847455-e239-4d88-ba2d-0e17255fcaa3-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"08847455-e239-4d88-ba2d-0e17255fcaa3\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.401411 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/08847455-e239-4d88-ba2d-0e17255fcaa3-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"08847455-e239-4d88-ba2d-0e17255fcaa3\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.401440 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gq8rz\" (UniqueName: \"kubernetes.io/projected/08847455-e239-4d88-ba2d-0e17255fcaa3-kube-api-access-gq8rz\") pod \"cinder-scheduler-0\" (UID: \"08847455-e239-4d88-ba2d-0e17255fcaa3\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.401613 4632 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bc95ee19-cd3d-4f92-accc-637656c66069-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.401632 4632 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bc95ee19-cd3d-4f92-accc-637656c66069-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.401991 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/08847455-e239-4d88-ba2d-0e17255fcaa3-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"08847455-e239-4d88-ba2d-0e17255fcaa3\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.406151 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08847455-e239-4d88-ba2d-0e17255fcaa3-config-data\") pod \"cinder-scheduler-0\" (UID: \"08847455-e239-4d88-ba2d-0e17255fcaa3\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 
06:58:46.408332 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc95ee19-cd3d-4f92-accc-637656c66069-kube-api-access-cwgzr" (OuterVolumeSpecName: "kube-api-access-cwgzr") pod "bc95ee19-cd3d-4f92-accc-637656c66069" (UID: "bc95ee19-cd3d-4f92-accc-637656c66069"). InnerVolumeSpecName "kube-api-access-cwgzr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.409318 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08847455-e239-4d88-ba2d-0e17255fcaa3-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"08847455-e239-4d88-ba2d-0e17255fcaa3\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.409905 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/08847455-e239-4d88-ba2d-0e17255fcaa3-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"08847455-e239-4d88-ba2d-0e17255fcaa3\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.416865 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc95ee19-cd3d-4f92-accc-637656c66069-scripts" (OuterVolumeSpecName: "scripts") pod "bc95ee19-cd3d-4f92-accc-637656c66069" (UID: "bc95ee19-cd3d-4f92-accc-637656c66069"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.418228 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gq8rz\" (UniqueName: \"kubernetes.io/projected/08847455-e239-4d88-ba2d-0e17255fcaa3-kube-api-access-gq8rz\") pod \"cinder-scheduler-0\" (UID: \"08847455-e239-4d88-ba2d-0e17255fcaa3\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.419628 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08847455-e239-4d88-ba2d-0e17255fcaa3-scripts\") pod \"cinder-scheduler-0\" (UID: \"08847455-e239-4d88-ba2d-0e17255fcaa3\") " pod="openstack/cinder-scheduler-0" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.438447 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc95ee19-cd3d-4f92-accc-637656c66069-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "bc95ee19-cd3d-4f92-accc-637656c66069" (UID: "bc95ee19-cd3d-4f92-accc-637656c66069"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.473672 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc95ee19-cd3d-4f92-accc-637656c66069-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bc95ee19-cd3d-4f92-accc-637656c66069" (UID: "bc95ee19-cd3d-4f92-accc-637656c66069"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.480388 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc95ee19-cd3d-4f92-accc-637656c66069-config-data" (OuterVolumeSpecName: "config-data") pod "bc95ee19-cd3d-4f92-accc-637656c66069" (UID: "bc95ee19-cd3d-4f92-accc-637656c66069"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.504277 4632 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bc95ee19-cd3d-4f92-accc-637656c66069-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.504311 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc95ee19-cd3d-4f92-accc-637656c66069-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.504325 4632 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bc95ee19-cd3d-4f92-accc-637656c66069-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.504335 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc95ee19-cd3d-4f92-accc-637656c66069-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.504347 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cwgzr\" (UniqueName: \"kubernetes.io/projected/bc95ee19-cd3d-4f92-accc-637656c66069-kube-api-access-cwgzr\") on node \"crc\" DevicePath \"\"" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.546492 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.761605 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="599bd62f-6f31-4255-b3b2-083faf500f85" path="/var/lib/kubelet/pods/599bd62f-6f31-4255-b3b2-083faf500f85/volumes" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.792911 4632 generic.go:334] "Generic (PLEG): container finished" podID="b7cf908d-5642-4d23-9874-d4b7b1d3f323" containerID="db63ecd323b208a59bae25a8381222ab52945ec5c6a583c001bcd6c7f2d37fde" exitCode=0 Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.793069 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-46f1-account-create-update-5zv72" event={"ID":"b7cf908d-5642-4d23-9874-d4b7b1d3f323","Type":"ContainerDied","Data":"db63ecd323b208a59bae25a8381222ab52945ec5c6a583c001bcd6c7f2d37fde"} Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.796109 4632 generic.go:334] "Generic (PLEG): container finished" podID="227dea0e-053e-4c4e-a209-5b6ad4f9145f" containerID="3b2c01c075f3736e44a5a7538a150d0f10e59fd5da228d13e67c52ef1427ee7c" exitCode=0 Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.796258 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-05f8-account-create-update-wr5n9" event={"ID":"227dea0e-053e-4c4e-a209-5b6ad4f9145f","Type":"ContainerDied","Data":"3b2c01c075f3736e44a5a7538a150d0f10e59fd5da228d13e67c52ef1427ee7c"} Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.805848 4632 generic.go:334] "Generic (PLEG): container finished" podID="bc95ee19-cd3d-4f92-accc-637656c66069" containerID="70cc31822087780048289b3347fd49b9c67f99a57d77e793113b7a65ccd2cb7a" exitCode=0 Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.805926 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bc95ee19-cd3d-4f92-accc-637656c66069","Type":"ContainerDied","Data":"70cc31822087780048289b3347fd49b9c67f99a57d77e793113b7a65ccd2cb7a"} Dec 01 06:58:46 crc 
kubenswrapper[4632]: I1201 06:58:46.805959 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bc95ee19-cd3d-4f92-accc-637656c66069","Type":"ContainerDied","Data":"8a68fc7d5a15ae71af0634dd3dc5af4fe0fb689d590eab5588bdc0068d18bbc5"} Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.805981 4632 scope.go:117] "RemoveContainer" containerID="32f96c15774221a41c2df09c91676d6d46c543a5f33919dc5b1b406fb17955d1" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.806104 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.812057 4632 generic.go:334] "Generic (PLEG): container finished" podID="e705f257-798d-43b0-985c-bf3499b2c720" containerID="5217b31c470a73a8dc7553d7244bbee4649cf93862343b9d4e3ba4aa6a4f45f5" exitCode=0 Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.812110 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-05bc-account-create-update-58k9r" event={"ID":"e705f257-798d-43b0-985c-bf3499b2c720","Type":"ContainerDied","Data":"5217b31c470a73a8dc7553d7244bbee4649cf93862343b9d4e3ba4aa6a4f45f5"} Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.813466 4632 generic.go:334] "Generic (PLEG): container finished" podID="80322b90-d8de-4544-8962-3761a3d13e03" containerID="c408ae4834ffcbb51601338cd6039690e9278fb564e1d9926a1d5c4150f2883b" exitCode=0 Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.813744 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-x94rl" event={"ID":"80322b90-d8de-4544-8962-3761a3d13e03","Type":"ContainerDied","Data":"c408ae4834ffcbb51601338cd6039690e9278fb564e1d9926a1d5c4150f2883b"} Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.844546 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.858023 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.873261 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 06:58:46 crc kubenswrapper[4632]: E1201 06:58:46.873711 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc95ee19-cd3d-4f92-accc-637656c66069" containerName="ceilometer-notification-agent" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.873732 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc95ee19-cd3d-4f92-accc-637656c66069" containerName="ceilometer-notification-agent" Dec 01 06:58:46 crc kubenswrapper[4632]: E1201 06:58:46.873753 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc95ee19-cd3d-4f92-accc-637656c66069" containerName="ceilometer-central-agent" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.873763 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc95ee19-cd3d-4f92-accc-637656c66069" containerName="ceilometer-central-agent" Dec 01 06:58:46 crc kubenswrapper[4632]: E1201 06:58:46.873785 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc95ee19-cd3d-4f92-accc-637656c66069" containerName="sg-core" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.873792 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc95ee19-cd3d-4f92-accc-637656c66069" containerName="sg-core" Dec 01 06:58:46 crc kubenswrapper[4632]: E1201 06:58:46.873812 4632 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="bc95ee19-cd3d-4f92-accc-637656c66069" containerName="proxy-httpd" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.873819 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc95ee19-cd3d-4f92-accc-637656c66069" containerName="proxy-httpd" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.874044 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc95ee19-cd3d-4f92-accc-637656c66069" containerName="proxy-httpd" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.874061 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc95ee19-cd3d-4f92-accc-637656c66069" containerName="sg-core" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.874077 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc95ee19-cd3d-4f92-accc-637656c66069" containerName="ceilometer-notification-agent" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.874096 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc95ee19-cd3d-4f92-accc-637656c66069" containerName="ceilometer-central-agent" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.875914 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.880797 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.881127 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 06:58:46 crc kubenswrapper[4632]: I1201 06:58:46.882714 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 06:58:47 crc kubenswrapper[4632]: I1201 06:58:47.002968 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 01 06:58:47 crc kubenswrapper[4632]: I1201 06:58:47.013310 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-config-data\") pod \"ceilometer-0\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") " pod="openstack/ceilometer-0" Dec 01 06:58:47 crc kubenswrapper[4632]: I1201 06:58:47.013449 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") " pod="openstack/ceilometer-0" Dec 01 06:58:47 crc kubenswrapper[4632]: I1201 06:58:47.013632 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") " pod="openstack/ceilometer-0" Dec 01 06:58:47 crc kubenswrapper[4632]: I1201 06:58:47.013717 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8tbg\" (UniqueName: \"kubernetes.io/projected/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-kube-api-access-v8tbg\") pod \"ceilometer-0\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") " pod="openstack/ceilometer-0" Dec 01 06:58:47 crc kubenswrapper[4632]: I1201 06:58:47.013794 4632 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-run-httpd\") pod \"ceilometer-0\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") " pod="openstack/ceilometer-0" Dec 01 06:58:47 crc kubenswrapper[4632]: I1201 06:58:47.013863 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-log-httpd\") pod \"ceilometer-0\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") " pod="openstack/ceilometer-0" Dec 01 06:58:47 crc kubenswrapper[4632]: I1201 06:58:47.013893 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-scripts\") pod \"ceilometer-0\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") " pod="openstack/ceilometer-0" Dec 01 06:58:47 crc kubenswrapper[4632]: I1201 06:58:47.117680 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-config-data\") pod \"ceilometer-0\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") " pod="openstack/ceilometer-0" Dec 01 06:58:47 crc kubenswrapper[4632]: I1201 06:58:47.117745 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") " pod="openstack/ceilometer-0" Dec 01 06:58:47 crc kubenswrapper[4632]: I1201 06:58:47.117778 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") " pod="openstack/ceilometer-0" Dec 01 06:58:47 crc kubenswrapper[4632]: I1201 06:58:47.117801 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8tbg\" (UniqueName: \"kubernetes.io/projected/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-kube-api-access-v8tbg\") pod \"ceilometer-0\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") " pod="openstack/ceilometer-0" Dec 01 06:58:47 crc kubenswrapper[4632]: I1201 06:58:47.117827 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-run-httpd\") pod \"ceilometer-0\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") " pod="openstack/ceilometer-0" Dec 01 06:58:47 crc kubenswrapper[4632]: I1201 06:58:47.117854 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-log-httpd\") pod \"ceilometer-0\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") " pod="openstack/ceilometer-0" Dec 01 06:58:47 crc kubenswrapper[4632]: I1201 06:58:47.117872 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-scripts\") pod \"ceilometer-0\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") " pod="openstack/ceilometer-0" Dec 01 06:58:47 crc kubenswrapper[4632]: I1201 06:58:47.119545 4632 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-run-httpd\") pod \"ceilometer-0\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") " pod="openstack/ceilometer-0"
Dec 01 06:58:47 crc kubenswrapper[4632]: I1201 06:58:47.119663 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-log-httpd\") pod \"ceilometer-0\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") " pod="openstack/ceilometer-0"
Dec 01 06:58:47 crc kubenswrapper[4632]: I1201 06:58:47.125026 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-scripts\") pod \"ceilometer-0\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") " pod="openstack/ceilometer-0"
Dec 01 06:58:47 crc kubenswrapper[4632]: I1201 06:58:47.125126 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") " pod="openstack/ceilometer-0"
Dec 01 06:58:47 crc kubenswrapper[4632]: I1201 06:58:47.129257 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-config-data\") pod \"ceilometer-0\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") " pod="openstack/ceilometer-0"
Dec 01 06:58:47 crc kubenswrapper[4632]: I1201 06:58:47.129706 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") " pod="openstack/ceilometer-0"
Dec 01 06:58:47 crc kubenswrapper[4632]: I1201 06:58:47.136427 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8tbg\" (UniqueName: \"kubernetes.io/projected/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-kube-api-access-v8tbg\") pod \"ceilometer-0\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") " pod="openstack/ceilometer-0"
Dec 01 06:58:47 crc kubenswrapper[4632]: I1201 06:58:47.203383 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 01 06:58:48 crc kubenswrapper[4632]: I1201 06:58:48.657097 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0"
Dec 01 06:58:48 crc kubenswrapper[4632]: I1201 06:58:48.765539 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc95ee19-cd3d-4f92-accc-637656c66069" path="/var/lib/kubelet/pods/bc95ee19-cd3d-4f92-accc-637656c66069/volumes"
Dec 01 06:58:49 crc kubenswrapper[4632]: I1201 06:58:49.499424 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 01 06:58:49 crc kubenswrapper[4632]: I1201 06:58:49.499476 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 01 06:58:49 crc kubenswrapper[4632]: I1201 06:58:49.499522 4632 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs"
Dec 01 06:58:49 crc kubenswrapper[4632]: I1201 06:58:49.500067 4632 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1813c53cd60de5bffa918f4bfd6831c16142699025e22227df4ba9598c98e491"} pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 01 06:58:49 crc kubenswrapper[4632]: I1201 06:58:49.500131 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" containerID="cri-o://1813c53cd60de5bffa918f4bfd6831c16142699025e22227df4ba9598c98e491" gracePeriod=600
Dec 01 06:58:49 crc kubenswrapper[4632]: I1201 06:58:49.862881 4632 generic.go:334] "Generic (PLEG): container finished" podID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerID="1813c53cd60de5bffa918f4bfd6831c16142699025e22227df4ba9598c98e491" exitCode=0
Dec 01 06:58:49 crc kubenswrapper[4632]: I1201 06:58:49.862931 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerDied","Data":"1813c53cd60de5bffa918f4bfd6831c16142699025e22227df4ba9598c98e491"}
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.582066 4632 scope.go:117] "RemoveContainer" containerID="38cf06f25f88b7e076b9d55523a097805f2822bdc72082b0e53d13c68912eab5"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.750579 4632 scope.go:117] "RemoveContainer" containerID="70cc31822087780048289b3347fd49b9c67f99a57d77e793113b7a65ccd2cb7a"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.791239 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-fnpks"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.814504 4632 scope.go:117] "RemoveContainer" containerID="ec1190518c8b14a6ab20aeb38738331506887ada8ae059bd234e5473a7b03a1c"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.816401 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-46f1-account-create-update-5zv72"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.832117 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-05f8-account-create-update-wr5n9"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.838570 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-x94rl"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.845664 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-05bc-account-create-update-58k9r"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.852963 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-pmhkj"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.860213 4632 scope.go:117] "RemoveContainer" containerID="32f96c15774221a41c2df09c91676d6d46c543a5f33919dc5b1b406fb17955d1"
Dec 01 06:58:51 crc kubenswrapper[4632]: E1201 06:58:51.862393 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32f96c15774221a41c2df09c91676d6d46c543a5f33919dc5b1b406fb17955d1\": container with ID starting with 32f96c15774221a41c2df09c91676d6d46c543a5f33919dc5b1b406fb17955d1 not found: ID does not exist" containerID="32f96c15774221a41c2df09c91676d6d46c543a5f33919dc5b1b406fb17955d1"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.862429 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32f96c15774221a41c2df09c91676d6d46c543a5f33919dc5b1b406fb17955d1"} err="failed to get container status \"32f96c15774221a41c2df09c91676d6d46c543a5f33919dc5b1b406fb17955d1\": rpc error: code = NotFound desc = could not find container \"32f96c15774221a41c2df09c91676d6d46c543a5f33919dc5b1b406fb17955d1\": container with ID starting with 32f96c15774221a41c2df09c91676d6d46c543a5f33919dc5b1b406fb17955d1 not found: ID does not exist"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.862479 4632 scope.go:117] "RemoveContainer" containerID="38cf06f25f88b7e076b9d55523a097805f2822bdc72082b0e53d13c68912eab5"
Dec 01 06:58:51 crc kubenswrapper[4632]: E1201 06:58:51.865330 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38cf06f25f88b7e076b9d55523a097805f2822bdc72082b0e53d13c68912eab5\": container with ID starting with 38cf06f25f88b7e076b9d55523a097805f2822bdc72082b0e53d13c68912eab5 not found: ID does not exist" containerID="38cf06f25f88b7e076b9d55523a097805f2822bdc72082b0e53d13c68912eab5"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.865380 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38cf06f25f88b7e076b9d55523a097805f2822bdc72082b0e53d13c68912eab5"} err="failed to get container status \"38cf06f25f88b7e076b9d55523a097805f2822bdc72082b0e53d13c68912eab5\": rpc error: code = NotFound desc = could not find container \"38cf06f25f88b7e076b9d55523a097805f2822bdc72082b0e53d13c68912eab5\": container with ID starting with 38cf06f25f88b7e076b9d55523a097805f2822bdc72082b0e53d13c68912eab5 not found: ID does not exist"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.865404 4632 scope.go:117] "RemoveContainer" containerID="70cc31822087780048289b3347fd49b9c67f99a57d77e793113b7a65ccd2cb7a"
Dec 01 06:58:51 crc kubenswrapper[4632]: E1201 06:58:51.865843 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70cc31822087780048289b3347fd49b9c67f99a57d77e793113b7a65ccd2cb7a\": container with ID starting with 70cc31822087780048289b3347fd49b9c67f99a57d77e793113b7a65ccd2cb7a not found: ID does not exist" containerID="70cc31822087780048289b3347fd49b9c67f99a57d77e793113b7a65ccd2cb7a"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.865868 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70cc31822087780048289b3347fd49b9c67f99a57d77e793113b7a65ccd2cb7a"} err="failed to get container status \"70cc31822087780048289b3347fd49b9c67f99a57d77e793113b7a65ccd2cb7a\": rpc error: code = NotFound desc = could not find container \"70cc31822087780048289b3347fd49b9c67f99a57d77e793113b7a65ccd2cb7a\": container with ID starting with 70cc31822087780048289b3347fd49b9c67f99a57d77e793113b7a65ccd2cb7a not found: ID does not exist"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.865884 4632 scope.go:117] "RemoveContainer" containerID="ec1190518c8b14a6ab20aeb38738331506887ada8ae059bd234e5473a7b03a1c"
Dec 01 06:58:51 crc kubenswrapper[4632]: E1201 06:58:51.866677 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec1190518c8b14a6ab20aeb38738331506887ada8ae059bd234e5473a7b03a1c\": container with ID starting with ec1190518c8b14a6ab20aeb38738331506887ada8ae059bd234e5473a7b03a1c not found: ID does not exist" containerID="ec1190518c8b14a6ab20aeb38738331506887ada8ae059bd234e5473a7b03a1c"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.866703 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec1190518c8b14a6ab20aeb38738331506887ada8ae059bd234e5473a7b03a1c"} err="failed to get container status \"ec1190518c8b14a6ab20aeb38738331506887ada8ae059bd234e5473a7b03a1c\": rpc error: code = NotFound desc = could not find container \"ec1190518c8b14a6ab20aeb38738331506887ada8ae059bd234e5473a7b03a1c\": container with ID starting with ec1190518c8b14a6ab20aeb38738331506887ada8ae059bd234e5473a7b03a1c not found: ID does not exist"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.866717 4632 scope.go:117] "RemoveContainer" containerID="e2c37d74168c2266d0e222524d08e21b2b2ded4712e89e7f6ccc10e73c3e4637"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.897940 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-46f1-account-create-update-5zv72" event={"ID":"b7cf908d-5642-4d23-9874-d4b7b1d3f323","Type":"ContainerDied","Data":"02e4941b7ee69bc992b0a8be06b8d54df60a6955034394542055cface56dc1d0"}
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.897985 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="02e4941b7ee69bc992b0a8be06b8d54df60a6955034394542055cface56dc1d0"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.897959 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-46f1-account-create-update-5zv72"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.901656 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"08847455-e239-4d88-ba2d-0e17255fcaa3","Type":"ContainerStarted","Data":"39e659eb08e151f7c1ed48455b9c6bc54fbdd2845be24a15bacf6b60d1ef9238"}
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.908120 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-05f8-account-create-update-wr5n9" event={"ID":"227dea0e-053e-4c4e-a209-5b6ad4f9145f","Type":"ContainerDied","Data":"1224969288c61eea15731183d65d4d0c699edef5a286f7380e3e18af99c9e3fe"}
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.908162 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1224969288c61eea15731183d65d4d0c699edef5a286f7380e3e18af99c9e3fe"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.908238 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-05f8-account-create-update-wr5n9"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.915789 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-pmhkj" event={"ID":"fbecd90a-c90a-4fe8-a349-7701b7256955","Type":"ContainerDied","Data":"843b75a5ff8ffec84ee07d73b4018920a40451567d48816d0740f1b3c039e63f"}
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.915812 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-pmhkj"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.915814 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="843b75a5ff8ffec84ee07d73b4018920a40451567d48816d0740f1b3c039e63f"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.919284 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-x94rl" event={"ID":"80322b90-d8de-4544-8962-3761a3d13e03","Type":"ContainerDied","Data":"fe328eacb6766bc84d552d9da198e0f786c5fe2896dd1477e0ecf5d3c7aeac5e"}
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.919310 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fe328eacb6766bc84d552d9da198e0f786c5fe2896dd1477e0ecf5d3c7aeac5e"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.919313 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-x94rl"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.929648 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wsxjc\" (UniqueName: \"kubernetes.io/projected/80322b90-d8de-4544-8962-3761a3d13e03-kube-api-access-wsxjc\") pod \"80322b90-d8de-4544-8962-3761a3d13e03\" (UID: \"80322b90-d8de-4544-8962-3761a3d13e03\") "
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.929712 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/227dea0e-053e-4c4e-a209-5b6ad4f9145f-operator-scripts\") pod \"227dea0e-053e-4c4e-a209-5b6ad4f9145f\" (UID: \"227dea0e-053e-4c4e-a209-5b6ad4f9145f\") "
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.929771 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w74st\" (UniqueName: \"kubernetes.io/projected/b7cf908d-5642-4d23-9874-d4b7b1d3f323-kube-api-access-w74st\") pod \"b7cf908d-5642-4d23-9874-d4b7b1d3f323\" (UID: \"b7cf908d-5642-4d23-9874-d4b7b1d3f323\") "
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.929831 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/484f5acc-9309-4561-8055-1fc5df33e183-operator-scripts\") pod \"484f5acc-9309-4561-8055-1fc5df33e183\" (UID: \"484f5acc-9309-4561-8055-1fc5df33e183\") "
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.929847 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fbecd90a-c90a-4fe8-a349-7701b7256955-operator-scripts\") pod \"fbecd90a-c90a-4fe8-a349-7701b7256955\" (UID: \"fbecd90a-c90a-4fe8-a349-7701b7256955\") "
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.929903 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-792pd\" (UniqueName: \"kubernetes.io/projected/e705f257-798d-43b0-985c-bf3499b2c720-kube-api-access-792pd\") pod \"e705f257-798d-43b0-985c-bf3499b2c720\" (UID: \"e705f257-798d-43b0-985c-bf3499b2c720\") "
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.929937 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ctmcv\" (UniqueName: \"kubernetes.io/projected/227dea0e-053e-4c4e-a209-5b6ad4f9145f-kube-api-access-ctmcv\") pod \"227dea0e-053e-4c4e-a209-5b6ad4f9145f\" (UID: \"227dea0e-053e-4c4e-a209-5b6ad4f9145f\") "
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.929992 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f7xlw\" (UniqueName: \"kubernetes.io/projected/484f5acc-9309-4561-8055-1fc5df33e183-kube-api-access-f7xlw\") pod \"484f5acc-9309-4561-8055-1fc5df33e183\" (UID: \"484f5acc-9309-4561-8055-1fc5df33e183\") "
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.930092 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80322b90-d8de-4544-8962-3761a3d13e03-operator-scripts\") pod \"80322b90-d8de-4544-8962-3761a3d13e03\" (UID: \"80322b90-d8de-4544-8962-3761a3d13e03\") "
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.930124 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b7cf908d-5642-4d23-9874-d4b7b1d3f323-operator-scripts\") pod \"b7cf908d-5642-4d23-9874-d4b7b1d3f323\" (UID: \"b7cf908d-5642-4d23-9874-d4b7b1d3f323\") "
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.930163 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e705f257-798d-43b0-985c-bf3499b2c720-operator-scripts\") pod \"e705f257-798d-43b0-985c-bf3499b2c720\" (UID: \"e705f257-798d-43b0-985c-bf3499b2c720\") "
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.930183 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8rkln\" (UniqueName: \"kubernetes.io/projected/fbecd90a-c90a-4fe8-a349-7701b7256955-kube-api-access-8rkln\") pod \"fbecd90a-c90a-4fe8-a349-7701b7256955\" (UID: \"fbecd90a-c90a-4fe8-a349-7701b7256955\") "
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.930454 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/227dea0e-053e-4c4e-a209-5b6ad4f9145f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "227dea0e-053e-4c4e-a209-5b6ad4f9145f" (UID: "227dea0e-053e-4c4e-a209-5b6ad4f9145f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.930999 4632 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/227dea0e-053e-4c4e-a209-5b6ad4f9145f-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.931583 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80322b90-d8de-4544-8962-3761a3d13e03-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "80322b90-d8de-4544-8962-3761a3d13e03" (UID: "80322b90-d8de-4544-8962-3761a3d13e03"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.931890 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7cf908d-5642-4d23-9874-d4b7b1d3f323-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b7cf908d-5642-4d23-9874-d4b7b1d3f323" (UID: "b7cf908d-5642-4d23-9874-d4b7b1d3f323"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.932483 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fbecd90a-c90a-4fe8-a349-7701b7256955-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fbecd90a-c90a-4fe8-a349-7701b7256955" (UID: "fbecd90a-c90a-4fe8-a349-7701b7256955"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.933724 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/484f5acc-9309-4561-8055-1fc5df33e183-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "484f5acc-9309-4561-8055-1fc5df33e183" (UID: "484f5acc-9309-4561-8055-1fc5df33e183"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.934054 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e705f257-798d-43b0-985c-bf3499b2c720-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e705f257-798d-43b0-985c-bf3499b2c720" (UID: "e705f257-798d-43b0-985c-bf3499b2c720"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.934814 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/227dea0e-053e-4c4e-a209-5b6ad4f9145f-kube-api-access-ctmcv" (OuterVolumeSpecName: "kube-api-access-ctmcv") pod "227dea0e-053e-4c4e-a209-5b6ad4f9145f" (UID: "227dea0e-053e-4c4e-a209-5b6ad4f9145f"). InnerVolumeSpecName "kube-api-access-ctmcv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.935330 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-fnpks" event={"ID":"484f5acc-9309-4561-8055-1fc5df33e183","Type":"ContainerDied","Data":"29f3ab514fb03c6998d98094f5f929b356563fb29b8a664143f9a64cea921bf4"}
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.936026 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="29f3ab514fb03c6998d98094f5f929b356563fb29b8a664143f9a64cea921bf4"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.939507 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-fnpks"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.942902 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e705f257-798d-43b0-985c-bf3499b2c720-kube-api-access-792pd" (OuterVolumeSpecName: "kube-api-access-792pd") pod "e705f257-798d-43b0-985c-bf3499b2c720" (UID: "e705f257-798d-43b0-985c-bf3499b2c720"). InnerVolumeSpecName "kube-api-access-792pd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.942957 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7cf908d-5642-4d23-9874-d4b7b1d3f323-kube-api-access-w74st" (OuterVolumeSpecName: "kube-api-access-w74st") pod "b7cf908d-5642-4d23-9874-d4b7b1d3f323" (UID: "b7cf908d-5642-4d23-9874-d4b7b1d3f323"). InnerVolumeSpecName "kube-api-access-w74st". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.943143 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80322b90-d8de-4544-8962-3761a3d13e03-kube-api-access-wsxjc" (OuterVolumeSpecName: "kube-api-access-wsxjc") pod "80322b90-d8de-4544-8962-3761a3d13e03" (UID: "80322b90-d8de-4544-8962-3761a3d13e03"). InnerVolumeSpecName "kube-api-access-wsxjc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.943474 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/484f5acc-9309-4561-8055-1fc5df33e183-kube-api-access-f7xlw" (OuterVolumeSpecName: "kube-api-access-f7xlw") pod "484f5acc-9309-4561-8055-1fc5df33e183" (UID: "484f5acc-9309-4561-8055-1fc5df33e183"). InnerVolumeSpecName "kube-api-access-f7xlw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.945886 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbecd90a-c90a-4fe8-a349-7701b7256955-kube-api-access-8rkln" (OuterVolumeSpecName: "kube-api-access-8rkln") pod "fbecd90a-c90a-4fe8-a349-7701b7256955" (UID: "fbecd90a-c90a-4fe8-a349-7701b7256955"). InnerVolumeSpecName "kube-api-access-8rkln". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.946180 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-05bc-account-create-update-58k9r" event={"ID":"e705f257-798d-43b0-985c-bf3499b2c720","Type":"ContainerDied","Data":"22c2714ad31a3e086f40fda3f828b86f92246fa336314db6423d4dd6cfa0a5e5"}
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.946228 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22c2714ad31a3e086f40fda3f828b86f92246fa336314db6423d4dd6cfa0a5e5"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.946246 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-05bc-account-create-update-58k9r"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.953037 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.026513661 podStartE2EDuration="13.953019814s" podCreationTimestamp="2025-12-01 06:58:38 +0000 UTC" firstStartedPulling="2025-12-01 06:58:39.700690238 +0000 UTC m=+929.265703222" lastFinishedPulling="2025-12-01 06:58:51.627196402 +0000 UTC m=+941.192209375" observedRunningTime="2025-12-01 06:58:51.941497839 +0000 UTC m=+941.506510801" watchObservedRunningTime="2025-12-01 06:58:51.953019814 +0000 UTC m=+941.518032787"
Dec 01 06:58:51 crc kubenswrapper[4632]: I1201 06:58:51.996694 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 01 06:58:51 crc kubenswrapper[4632]: W1201 06:58:51.999028 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda99f5bb0_0d0b_4bde_91eb_bf6cc7108aa3.slice/crio-a6d19a39144d4da2d6f09d966d9615d99e232cc6d2702595f97ac4d81bb1396a WatchSource:0}: Error finding container a6d19a39144d4da2d6f09d966d9615d99e232cc6d2702595f97ac4d81bb1396a: Status 404 returned error can't find the container with id a6d19a39144d4da2d6f09d966d9615d99e232cc6d2702595f97ac4d81bb1396a
Dec 01 06:58:52 crc kubenswrapper[4632]: I1201 06:58:52.033672 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w74st\" (UniqueName: \"kubernetes.io/projected/b7cf908d-5642-4d23-9874-d4b7b1d3f323-kube-api-access-w74st\") on node \"crc\" DevicePath \"\""
Dec 01 06:58:52 crc kubenswrapper[4632]: I1201 06:58:52.033705 4632 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fbecd90a-c90a-4fe8-a349-7701b7256955-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 01 06:58:52 crc kubenswrapper[4632]: I1201 06:58:52.033715 4632 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/484f5acc-9309-4561-8055-1fc5df33e183-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 01 06:58:52 crc kubenswrapper[4632]: I1201 06:58:52.033725 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-792pd\" (UniqueName: \"kubernetes.io/projected/e705f257-798d-43b0-985c-bf3499b2c720-kube-api-access-792pd\") on node \"crc\" DevicePath \"\""
Dec 01 06:58:52 crc kubenswrapper[4632]: I1201 06:58:52.033765 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ctmcv\" (UniqueName: \"kubernetes.io/projected/227dea0e-053e-4c4e-a209-5b6ad4f9145f-kube-api-access-ctmcv\") on node \"crc\" DevicePath \"\""
Dec 01 06:58:52 crc kubenswrapper[4632]: I1201 06:58:52.033788 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f7xlw\" (UniqueName: \"kubernetes.io/projected/484f5acc-9309-4561-8055-1fc5df33e183-kube-api-access-f7xlw\") on node \"crc\" DevicePath \"\""
Dec 01 06:58:52 crc kubenswrapper[4632]: I1201 06:58:52.033799 4632 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/80322b90-d8de-4544-8962-3761a3d13e03-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 01 06:58:52 crc kubenswrapper[4632]: I1201 06:58:52.033811 4632 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b7cf908d-5642-4d23-9874-d4b7b1d3f323-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 01 06:58:52 crc kubenswrapper[4632]: I1201 06:58:52.033822 4632 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e705f257-798d-43b0-985c-bf3499b2c720-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 01 06:58:52 crc kubenswrapper[4632]: I1201 06:58:52.033833 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8rkln\" (UniqueName: \"kubernetes.io/projected/fbecd90a-c90a-4fe8-a349-7701b7256955-kube-api-access-8rkln\") on node \"crc\" DevicePath \"\""
Dec 01 06:58:52 crc kubenswrapper[4632]: I1201 06:58:52.033845 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wsxjc\" (UniqueName: \"kubernetes.io/projected/80322b90-d8de-4544-8962-3761a3d13e03-kube-api-access-wsxjc\") on node \"crc\" DevicePath \"\""
Dec 01 06:58:52 crc kubenswrapper[4632]: I1201 06:58:52.258121 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-7697b7f499-t6njt"
Dec 01 06:58:52 crc kubenswrapper[4632]: I1201 06:58:52.266507 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-7697b7f499-t6njt"
Dec 01 06:58:52 crc kubenswrapper[4632]: I1201 06:58:52.957565 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3","Type":"ContainerStarted","Data":"7f2eb93523dffbc95d871dcf1a18bff06c84b711ea7d294e8852335c6b9be12e"}
Dec 01 06:58:52 crc kubenswrapper[4632]: I1201 06:58:52.958200 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3","Type":"ContainerStarted","Data":"a6d19a39144d4da2d6f09d966d9615d99e232cc6d2702595f97ac4d81bb1396a"}
Dec 01 06:58:52 crc kubenswrapper[4632]: I1201 06:58:52.962604 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"c29a6239-304a-4a40-8e32-35dfb513bb8f","Type":"ContainerStarted","Data":"d7cb73bf8ed562142c2d7ac9f712b003c6bb2bfd1c478cbb844d9090c56c52ad"}
Dec 01 06:58:52 crc kubenswrapper[4632]: I1201 06:58:52.967740 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerStarted","Data":"512da22c748ed508b59e03438f20cc025fae801c143de4f88251a1676e9c9863"}
Dec 01 06:58:52 crc kubenswrapper[4632]: I1201 06:58:52.971969 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"08847455-e239-4d88-ba2d-0e17255fcaa3","Type":"ContainerStarted","Data":"c245116f60de7160f14e04807b673e4e1754b3904d8e261f37bdc3e777762bd3"}
Dec 01 06:58:52 crc kubenswrapper[4632]: I1201 06:58:52.972001 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"08847455-e239-4d88-ba2d-0e17255fcaa3","Type":"ContainerStarted","Data":"76ebf855d077cb8a29c2061486ab4c928ef1d913eaf81eb73aa76c810236a0d4"}
Dec 01 06:58:53 crc kubenswrapper[4632]: I1201 06:58:53.009775 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=7.009752208 podStartE2EDuration="7.009752208s" podCreationTimestamp="2025-12-01 06:58:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:58:53.003906288 +0000 UTC m=+942.568919262" watchObservedRunningTime="2025-12-01 06:58:53.009752208 +0000 UTC m=+942.574765180"
Dec 01 06:58:53 crc kubenswrapper[4632]: I1201 06:58:53.264548 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 01 06:58:53 crc kubenswrapper[4632]: I1201 06:58:53.981907 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3","Type":"ContainerStarted","Data":"481699a492589ad47e02f11426e8dddf071843c54ecce11816ae7c31c7b66a4b"}
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.253414 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-749r7"]
Dec 01 06:58:54 crc kubenswrapper[4632]: E1201 06:58:54.254033 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e705f257-798d-43b0-985c-bf3499b2c720" containerName="mariadb-account-create-update"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.254063 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="e705f257-798d-43b0-985c-bf3499b2c720" containerName="mariadb-account-create-update"
Dec 01 06:58:54 crc kubenswrapper[4632]: E1201 06:58:54.254095 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbecd90a-c90a-4fe8-a349-7701b7256955" containerName="mariadb-database-create"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.254103 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbecd90a-c90a-4fe8-a349-7701b7256955" containerName="mariadb-database-create"
Dec 01 06:58:54 crc kubenswrapper[4632]: E1201 06:58:54.254111 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7cf908d-5642-4d23-9874-d4b7b1d3f323" containerName="mariadb-account-create-update"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.254116 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7cf908d-5642-4d23-9874-d4b7b1d3f323" containerName="mariadb-account-create-update"
Dec 01 06:58:54 crc kubenswrapper[4632]: E1201 06:58:54.254123 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="484f5acc-9309-4561-8055-1fc5df33e183" containerName="mariadb-database-create"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.254128 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="484f5acc-9309-4561-8055-1fc5df33e183" containerName="mariadb-database-create"
Dec 01 06:58:54 crc kubenswrapper[4632]: E1201 06:58:54.254140 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="227dea0e-053e-4c4e-a209-5b6ad4f9145f" containerName="mariadb-account-create-update"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.254146 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="227dea0e-053e-4c4e-a209-5b6ad4f9145f" containerName="mariadb-account-create-update"
Dec 01 06:58:54 crc kubenswrapper[4632]: E1201 06:58:54.254158 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80322b90-d8de-4544-8962-3761a3d13e03" containerName="mariadb-database-create"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.254165 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="80322b90-d8de-4544-8962-3761a3d13e03" containerName="mariadb-database-create"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.254385 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbecd90a-c90a-4fe8-a349-7701b7256955" containerName="mariadb-database-create"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.254395 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="484f5acc-9309-4561-8055-1fc5df33e183" containerName="mariadb-database-create"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.254406 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7cf908d-5642-4d23-9874-d4b7b1d3f323" containerName="mariadb-account-create-update"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.254414 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="e705f257-798d-43b0-985c-bf3499b2c720" containerName="mariadb-account-create-update"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.254425 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="80322b90-d8de-4544-8962-3761a3d13e03" containerName="mariadb-database-create"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.254439 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="227dea0e-053e-4c4e-a209-5b6ad4f9145f" containerName="mariadb-account-create-update"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.255007 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-749r7"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.257997 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.258100 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-25mjt"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.258413 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.263849 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-749r7"]
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.391281 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/350c6392-bd06-44ec-98a2-edd392d66bbf-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-749r7\" (UID: \"350c6392-bd06-44ec-98a2-edd392d66bbf\") " pod="openstack/nova-cell0-conductor-db-sync-749r7"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.391623 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/350c6392-bd06-44ec-98a2-edd392d66bbf-scripts\") pod \"nova-cell0-conductor-db-sync-749r7\" (UID: \"350c6392-bd06-44ec-98a2-edd392d66bbf\") " pod="openstack/nova-cell0-conductor-db-sync-749r7"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.391830 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/350c6392-bd06-44ec-98a2-edd392d66bbf-config-data\") pod \"nova-cell0-conductor-db-sync-749r7\" (UID: \"350c6392-bd06-44ec-98a2-edd392d66bbf\") " pod="openstack/nova-cell0-conductor-db-sync-749r7"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.391897 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6r2l\" (UniqueName: \"kubernetes.io/projected/350c6392-bd06-44ec-98a2-edd392d66bbf-kube-api-access-f6r2l\") pod \"nova-cell0-conductor-db-sync-749r7\" (UID: \"350c6392-bd06-44ec-98a2-edd392d66bbf\") " pod="openstack/nova-cell0-conductor-db-sync-749r7"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.493177 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/350c6392-bd06-44ec-98a2-edd392d66bbf-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-749r7\" (UID: \"350c6392-bd06-44ec-98a2-edd392d66bbf\") " pod="openstack/nova-cell0-conductor-db-sync-749r7"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.493346 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/350c6392-bd06-44ec-98a2-edd392d66bbf-scripts\") pod \"nova-cell0-conductor-db-sync-749r7\" (UID: \"350c6392-bd06-44ec-98a2-edd392d66bbf\") " pod="openstack/nova-cell0-conductor-db-sync-749r7"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.493483 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/350c6392-bd06-44ec-98a2-edd392d66bbf-config-data\") pod \"nova-cell0-conductor-db-sync-749r7\" (UID: \"350c6392-bd06-44ec-98a2-edd392d66bbf\") " pod="openstack/nova-cell0-conductor-db-sync-749r7"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.493529 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6r2l\" (UniqueName: \"kubernetes.io/projected/350c6392-bd06-44ec-98a2-edd392d66bbf-kube-api-access-f6r2l\") pod \"nova-cell0-conductor-db-sync-749r7\" (UID: \"350c6392-bd06-44ec-98a2-edd392d66bbf\") " pod="openstack/nova-cell0-conductor-db-sync-749r7"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.498954 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/350c6392-bd06-44ec-98a2-edd392d66bbf-scripts\") pod \"nova-cell0-conductor-db-sync-749r7\" (UID: \"350c6392-bd06-44ec-98a2-edd392d66bbf\") " pod="openstack/nova-cell0-conductor-db-sync-749r7"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.499560 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/350c6392-bd06-44ec-98a2-edd392d66bbf-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-749r7\" (UID: \"350c6392-bd06-44ec-98a2-edd392d66bbf\") " pod="openstack/nova-cell0-conductor-db-sync-749r7"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.500078 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/350c6392-bd06-44ec-98a2-edd392d66bbf-config-data\") pod \"nova-cell0-conductor-db-sync-749r7\" (UID: \"350c6392-bd06-44ec-98a2-edd392d66bbf\") " pod="openstack/nova-cell0-conductor-db-sync-749r7"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.511201 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6r2l\" (UniqueName: \"kubernetes.io/projected/350c6392-bd06-44ec-98a2-edd392d66bbf-kube-api-access-f6r2l\") pod \"nova-cell0-conductor-db-sync-749r7\" (UID: \"350c6392-bd06-44ec-98a2-edd392d66bbf\") " pod="openstack/nova-cell0-conductor-db-sync-749r7"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.575716 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-749r7"
Dec 01 06:58:54 crc kubenswrapper[4632]: I1201 06:58:54.993327 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3","Type":"ContainerStarted","Data":"1cdaa4099f4eb8399d74bf62726502c528622f89f0c006563b7d1be505eee5ee"}
Dec 01 06:58:55 crc kubenswrapper[4632]: I1201 06:58:55.076782 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-749r7"]
Dec 01 06:58:56 crc kubenswrapper[4632]: I1201 06:58:56.006608 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-749r7" event={"ID":"350c6392-bd06-44ec-98a2-edd392d66bbf","Type":"ContainerStarted","Data":"5bbc46348e77afcf23cddac4e53e1e348c93ba27cc110935a53e0d71c6b1b916"}
Dec 01 06:58:56 crc kubenswrapper[4632]: I1201 06:58:56.546638 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0"
Dec 01 06:58:57 crc kubenswrapper[4632]: I1201 06:58:57.024616 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3","Type":"ContainerStarted","Data":"cc27a10cadc96520c2490e96a81ae10f027920291445c84cba47d62576067843"}
Dec 01 06:58:57 crc kubenswrapper[4632]: I1201 06:58:57.024807 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" containerName="ceilometer-central-agent" containerID="cri-o://7f2eb93523dffbc95d871dcf1a18bff06c84b711ea7d294e8852335c6b9be12e" gracePeriod=30
Dec 01 06:58:57 crc kubenswrapper[4632]: I1201 06:58:57.025089 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 01 06:58:57 crc kubenswrapper[4632]: I1201 06:58:57.025385 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" containerName="proxy-httpd" containerID="cri-o://cc27a10cadc96520c2490e96a81ae10f027920291445c84cba47d62576067843" gracePeriod=30
Dec 01 06:58:57 crc kubenswrapper[4632]: I1201 06:58:57.025439 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" containerName="sg-core" containerID="cri-o://1cdaa4099f4eb8399d74bf62726502c528622f89f0c006563b7d1be505eee5ee" gracePeriod=30
Dec 01 06:58:57 crc kubenswrapper[4632]: I1201 06:58:57.025475 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" containerName="ceilometer-notification-agent" containerID="cri-o://481699a492589ad47e02f11426e8dddf071843c54ecce11816ae7c31c7b66a4b" gracePeriod=30
Dec 01 06:58:57 crc kubenswrapper[4632]: I1201 06:58:57.056569 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=6.845168943 podStartE2EDuration="11.056549673s" podCreationTimestamp="2025-12-01 06:58:46 +0000 UTC" firstStartedPulling="2025-12-01 06:58:52.011730812 +0000 UTC m=+941.576743785" lastFinishedPulling="2025-12-01 06:58:56.223111542 +0000 UTC m=+945.788124515" observedRunningTime="2025-12-01 06:58:57.045858434 +0000 UTC m=+946.610871407" watchObservedRunningTime="2025-12-01 06:58:57.056549673 +0000 UTC m=+946.621562646"
Dec 01 06:58:58 crc kubenswrapper[4632]: I1201 06:58:58.034497 4632 generic.go:334] "Generic (PLEG): container finished" podID="a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" containerID="cc27a10cadc96520c2490e96a81ae10f027920291445c84cba47d62576067843" exitCode=0
Dec 01 06:58:58 crc kubenswrapper[4632]: I1201 06:58:58.034531 4632 generic.go:334] "Generic (PLEG): container finished" podID="a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" containerID="1cdaa4099f4eb8399d74bf62726502c528622f89f0c006563b7d1be505eee5ee" exitCode=2
Dec 01 06:58:58 crc kubenswrapper[4632]: I1201 06:58:58.034542 4632 generic.go:334] "Generic (PLEG): container finished" podID="a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" containerID="481699a492589ad47e02f11426e8dddf071843c54ecce11816ae7c31c7b66a4b" exitCode=0
Dec 01 06:58:58 crc kubenswrapper[4632]: I1201 06:58:58.034533 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3","Type":"ContainerDied","Data":"cc27a10cadc96520c2490e96a81ae10f027920291445c84cba47d62576067843"}
Dec 01 06:58:58 crc kubenswrapper[4632]: I1201 06:58:58.034575 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3","Type":"ContainerDied","Data":"1cdaa4099f4eb8399d74bf62726502c528622f89f0c006563b7d1be505eee5ee"}
Dec 01 06:58:58 crc kubenswrapper[4632]: I1201 06:58:58.034588 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3","Type":"ContainerDied","Data":"481699a492589ad47e02f11426e8dddf071843c54ecce11816ae7c31c7b66a4b"}
Dec 01 06:58:59 crc kubenswrapper[4632]: I1201 06:58:59.968291 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.018266 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-run-httpd\") pod \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") "
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.018331 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-log-httpd\") pod \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") "
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.018382 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v8tbg\" (UniqueName: \"kubernetes.io/projected/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-kube-api-access-v8tbg\") pod \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") "
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.018405 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-config-data\") pod \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") "
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.018559 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-scripts\") pod \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") "
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.018603 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-combined-ca-bundle\") pod \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") "
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.018689 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-sg-core-conf-yaml\") pod \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\" (UID: \"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3\") "
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.018954 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" (UID: "a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.019854 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" (UID: "a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.020288 4632 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.020303 4632 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.042515 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-kube-api-access-v8tbg" (OuterVolumeSpecName: "kube-api-access-v8tbg") pod "a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" (UID: "a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3"). InnerVolumeSpecName "kube-api-access-v8tbg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.042517 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-scripts" (OuterVolumeSpecName: "scripts") pod "a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" (UID: "a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.057969 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" (UID: "a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.062886 4632 generic.go:334] "Generic (PLEG): container finished" podID="a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" containerID="7f2eb93523dffbc95d871dcf1a18bff06c84b711ea7d294e8852335c6b9be12e" exitCode=0
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.063037 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3","Type":"ContainerDied","Data":"7f2eb93523dffbc95d871dcf1a18bff06c84b711ea7d294e8852335c6b9be12e"}
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.063121 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3","Type":"ContainerDied","Data":"a6d19a39144d4da2d6f09d966d9615d99e232cc6d2702595f97ac4d81bb1396a"}
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.063193 4632 scope.go:117] "RemoveContainer" containerID="cc27a10cadc96520c2490e96a81ae10f027920291445c84cba47d62576067843"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.063410 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.087532 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" (UID: "a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.104223 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-config-data" (OuterVolumeSpecName: "config-data") pod "a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" (UID: "a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.122209 4632 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-scripts\") on node \"crc\" DevicePath \"\""
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.122246 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.122261 4632 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.122271 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v8tbg\" (UniqueName: \"kubernetes.io/projected/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-kube-api-access-v8tbg\") on node \"crc\" DevicePath \"\""
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.122281 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3-config-data\") on node \"crc\" DevicePath \"\""
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.419527 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.439019 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.446917 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 01 06:59:00 crc kubenswrapper[4632]: E1201 06:59:00.447404 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" containerName="ceilometer-central-agent"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.447427 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" containerName="ceilometer-central-agent"
Dec 01 06:59:00 crc kubenswrapper[4632]: E1201 06:59:00.447441 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" containerName="ceilometer-notification-agent"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.447449 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" containerName="ceilometer-notification-agent"
Dec 01 06:59:00 crc kubenswrapper[4632]: E1201 06:59:00.447471 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" containerName="sg-core"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.447478 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" containerName="sg-core"
Dec 01 06:59:00 crc kubenswrapper[4632]: E1201 06:59:00.447494 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" containerName="proxy-httpd"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.447511 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" containerName="proxy-httpd"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.447695 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" containerName="ceilometer-notification-agent"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.447713 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" containerName="ceilometer-central-agent"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.447730 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" containerName="sg-core"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.447744 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" containerName="proxy-httpd"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.449311 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.452676 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.452682 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.454958 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.530192 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6969855-7e1c-46bd-8fc4-b168485224aa-config-data\") pod \"ceilometer-0\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " pod="openstack/ceilometer-0"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.530519 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6969855-7e1c-46bd-8fc4-b168485224aa-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " pod="openstack/ceilometer-0"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.530583 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6969855-7e1c-46bd-8fc4-b168485224aa-run-httpd\") pod \"ceilometer-0\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " pod="openstack/ceilometer-0"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.530638 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6969855-7e1c-46bd-8fc4-b168485224aa-log-httpd\") pod \"ceilometer-0\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " pod="openstack/ceilometer-0"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.530764 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6969855-7e1c-46bd-8fc4-b168485224aa-scripts\") pod \"ceilometer-0\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " pod="openstack/ceilometer-0"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.530798 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cq84m\" (UniqueName: \"kubernetes.io/projected/e6969855-7e1c-46bd-8fc4-b168485224aa-kube-api-access-cq84m\") pod \"ceilometer-0\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " pod="openstack/ceilometer-0"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.530875 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e6969855-7e1c-46bd-8fc4-b168485224aa-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " pod="openstack/ceilometer-0"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.632874 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6969855-7e1c-46bd-8fc4-b168485224aa-config-data\") pod \"ceilometer-0\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " pod="openstack/ceilometer-0"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.633015 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6969855-7e1c-46bd-8fc4-b168485224aa-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " pod="openstack/ceilometer-0"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.633049 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6969855-7e1c-46bd-8fc4-b168485224aa-run-httpd\") pod \"ceilometer-0\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " pod="openstack/ceilometer-0"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.633087 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6969855-7e1c-46bd-8fc4-b168485224aa-log-httpd\") pod \"ceilometer-0\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " pod="openstack/ceilometer-0"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.633146 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6969855-7e1c-46bd-8fc4-b168485224aa-scripts\") pod \"ceilometer-0\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " pod="openstack/ceilometer-0"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.633175 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cq84m\" (UniqueName: \"kubernetes.io/projected/e6969855-7e1c-46bd-8fc4-b168485224aa-kube-api-access-cq84m\") pod \"ceilometer-0\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " pod="openstack/ceilometer-0"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.633220 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e6969855-7e1c-46bd-8fc4-b168485224aa-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " pod="openstack/ceilometer-0"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.634257 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6969855-7e1c-46bd-8fc4-b168485224aa-run-httpd\") pod \"ceilometer-0\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " pod="openstack/ceilometer-0"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.635558 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6969855-7e1c-46bd-8fc4-b168485224aa-log-httpd\") pod \"ceilometer-0\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " pod="openstack/ceilometer-0"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.638218 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e6969855-7e1c-46bd-8fc4-b168485224aa-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " pod="openstack/ceilometer-0"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.638458 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6969855-7e1c-46bd-8fc4-b168485224aa-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " pod="openstack/ceilometer-0"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.638554 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6969855-7e1c-46bd-8fc4-b168485224aa-config-data\") pod \"ceilometer-0\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " pod="openstack/ceilometer-0"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.638866 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6969855-7e1c-46bd-8fc4-b168485224aa-scripts\") pod \"ceilometer-0\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " pod="openstack/ceilometer-0"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.647882 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cq84m\" (UniqueName: \"kubernetes.io/projected/e6969855-7e1c-46bd-8fc4-b168485224aa-kube-api-access-cq84m\") pod \"ceilometer-0\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " pod="openstack/ceilometer-0"
Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.771979 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 06:59:00 crc kubenswrapper[4632]: I1201 06:59:00.795832 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3" path="/var/lib/kubelet/pods/a99f5bb0-0d0b-4bde-91eb-bf6cc7108aa3/volumes" Dec 01 06:59:01 crc kubenswrapper[4632]: I1201 06:59:01.750179 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 01 06:59:04 crc kubenswrapper[4632]: I1201 06:59:04.007176 4632 scope.go:117] "RemoveContainer" containerID="1cdaa4099f4eb8399d74bf62726502c528622f89f0c006563b7d1be505eee5ee" Dec 01 06:59:04 crc kubenswrapper[4632]: I1201 06:59:04.044823 4632 scope.go:117] "RemoveContainer" containerID="481699a492589ad47e02f11426e8dddf071843c54ecce11816ae7c31c7b66a4b" Dec 01 06:59:04 crc kubenswrapper[4632]: I1201 06:59:04.171989 4632 scope.go:117] "RemoveContainer" containerID="7f2eb93523dffbc95d871dcf1a18bff06c84b711ea7d294e8852335c6b9be12e" Dec 01 06:59:04 crc kubenswrapper[4632]: I1201 06:59:04.196372 4632 scope.go:117] "RemoveContainer" containerID="cc27a10cadc96520c2490e96a81ae10f027920291445c84cba47d62576067843" Dec 01 06:59:04 crc kubenswrapper[4632]: E1201 06:59:04.196812 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc27a10cadc96520c2490e96a81ae10f027920291445c84cba47d62576067843\": container with ID starting with cc27a10cadc96520c2490e96a81ae10f027920291445c84cba47d62576067843 not found: ID does not exist" containerID="cc27a10cadc96520c2490e96a81ae10f027920291445c84cba47d62576067843" Dec 01 06:59:04 crc kubenswrapper[4632]: I1201 06:59:04.196859 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc27a10cadc96520c2490e96a81ae10f027920291445c84cba47d62576067843"} err="failed to get container status \"cc27a10cadc96520c2490e96a81ae10f027920291445c84cba47d62576067843\": rpc error: code = NotFound desc = could not find container \"cc27a10cadc96520c2490e96a81ae10f027920291445c84cba47d62576067843\": container with ID starting with cc27a10cadc96520c2490e96a81ae10f027920291445c84cba47d62576067843 not found: ID does not exist" Dec 01 06:59:04 crc kubenswrapper[4632]: I1201 06:59:04.196889 4632 scope.go:117] "RemoveContainer" containerID="1cdaa4099f4eb8399d74bf62726502c528622f89f0c006563b7d1be505eee5ee" Dec 01 06:59:04 crc kubenswrapper[4632]: E1201 06:59:04.197469 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1cdaa4099f4eb8399d74bf62726502c528622f89f0c006563b7d1be505eee5ee\": container with ID starting with 1cdaa4099f4eb8399d74bf62726502c528622f89f0c006563b7d1be505eee5ee not found: ID does not exist" containerID="1cdaa4099f4eb8399d74bf62726502c528622f89f0c006563b7d1be505eee5ee" Dec 01 06:59:04 crc kubenswrapper[4632]: I1201 06:59:04.197495 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cdaa4099f4eb8399d74bf62726502c528622f89f0c006563b7d1be505eee5ee"} err="failed to get container status \"1cdaa4099f4eb8399d74bf62726502c528622f89f0c006563b7d1be505eee5ee\": rpc error: code = NotFound desc = could not find container \"1cdaa4099f4eb8399d74bf62726502c528622f89f0c006563b7d1be505eee5ee\": container with ID starting with 1cdaa4099f4eb8399d74bf62726502c528622f89f0c006563b7d1be505eee5ee not found: ID does not exist" Dec 01 06:59:04 crc kubenswrapper[4632]: I1201 06:59:04.197509 4632 
scope.go:117] "RemoveContainer" containerID="481699a492589ad47e02f11426e8dddf071843c54ecce11816ae7c31c7b66a4b" Dec 01 06:59:04 crc kubenswrapper[4632]: E1201 06:59:04.197697 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"481699a492589ad47e02f11426e8dddf071843c54ecce11816ae7c31c7b66a4b\": container with ID starting with 481699a492589ad47e02f11426e8dddf071843c54ecce11816ae7c31c7b66a4b not found: ID does not exist" containerID="481699a492589ad47e02f11426e8dddf071843c54ecce11816ae7c31c7b66a4b" Dec 01 06:59:04 crc kubenswrapper[4632]: I1201 06:59:04.197721 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"481699a492589ad47e02f11426e8dddf071843c54ecce11816ae7c31c7b66a4b"} err="failed to get container status \"481699a492589ad47e02f11426e8dddf071843c54ecce11816ae7c31c7b66a4b\": rpc error: code = NotFound desc = could not find container \"481699a492589ad47e02f11426e8dddf071843c54ecce11816ae7c31c7b66a4b\": container with ID starting with 481699a492589ad47e02f11426e8dddf071843c54ecce11816ae7c31c7b66a4b not found: ID does not exist" Dec 01 06:59:04 crc kubenswrapper[4632]: I1201 06:59:04.197733 4632 scope.go:117] "RemoveContainer" containerID="7f2eb93523dffbc95d871dcf1a18bff06c84b711ea7d294e8852335c6b9be12e" Dec 01 06:59:04 crc kubenswrapper[4632]: E1201 06:59:04.197907 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f2eb93523dffbc95d871dcf1a18bff06c84b711ea7d294e8852335c6b9be12e\": container with ID starting with 7f2eb93523dffbc95d871dcf1a18bff06c84b711ea7d294e8852335c6b9be12e not found: ID does not exist" containerID="7f2eb93523dffbc95d871dcf1a18bff06c84b711ea7d294e8852335c6b9be12e" Dec 01 06:59:04 crc kubenswrapper[4632]: I1201 06:59:04.197928 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f2eb93523dffbc95d871dcf1a18bff06c84b711ea7d294e8852335c6b9be12e"} err="failed to get container status \"7f2eb93523dffbc95d871dcf1a18bff06c84b711ea7d294e8852335c6b9be12e\": rpc error: code = NotFound desc = could not find container \"7f2eb93523dffbc95d871dcf1a18bff06c84b711ea7d294e8852335c6b9be12e\": container with ID starting with 7f2eb93523dffbc95d871dcf1a18bff06c84b711ea7d294e8852335c6b9be12e not found: ID does not exist" Dec 01 06:59:04 crc kubenswrapper[4632]: I1201 06:59:04.448663 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 06:59:05 crc kubenswrapper[4632]: I1201 06:59:05.136817 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-749r7" event={"ID":"350c6392-bd06-44ec-98a2-edd392d66bbf","Type":"ContainerStarted","Data":"6c0b0b9a6d884c4991fe39e3e49bfae5d3aefa4b3f576850703f44fb9cad48c4"} Dec 01 06:59:05 crc kubenswrapper[4632]: I1201 06:59:05.141799 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6969855-7e1c-46bd-8fc4-b168485224aa","Type":"ContainerStarted","Data":"d6fa2e49cf60125a43bcb016a402844927cfefcfedda79642b98ed2f1f23dd80"} Dec 01 06:59:05 crc kubenswrapper[4632]: I1201 06:59:05.161586 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-749r7" podStartSLOduration=2.197465001 podStartE2EDuration="11.161568326s" podCreationTimestamp="2025-12-01 06:58:54 +0000 UTC" firstStartedPulling="2025-12-01 06:58:55.089470281 +0000 UTC 
m=+944.654483254" lastFinishedPulling="2025-12-01 06:59:04.053573607 +0000 UTC m=+953.618586579" observedRunningTime="2025-12-01 06:59:05.152978622 +0000 UTC m=+954.717991594" watchObservedRunningTime="2025-12-01 06:59:05.161568326 +0000 UTC m=+954.726581299" Dec 01 06:59:06 crc kubenswrapper[4632]: I1201 06:59:06.154627 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6969855-7e1c-46bd-8fc4-b168485224aa","Type":"ContainerStarted","Data":"79413fae02c42ae2c99729487f26e80baa83b0eb11d500f14b20afbce1a1e886"} Dec 01 06:59:06 crc kubenswrapper[4632]: I1201 06:59:06.155209 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6969855-7e1c-46bd-8fc4-b168485224aa","Type":"ContainerStarted","Data":"85ac04cd95f63cb4ec3ada54760d7a6bb7108a51da7a5605ca1af598c2d496c3"} Dec 01 06:59:06 crc kubenswrapper[4632]: I1201 06:59:06.240257 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 06:59:06 crc kubenswrapper[4632]: I1201 06:59:06.240523 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="c6717d9d-3b42-4b81-a25d-84d3b3170d55" containerName="glance-log" containerID="cri-o://dcb414ea911d5d042dec3aaf925d0c26f7813694cdb064768910c2df3910c5c6" gracePeriod=30 Dec 01 06:59:06 crc kubenswrapper[4632]: I1201 06:59:06.240631 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="c6717d9d-3b42-4b81-a25d-84d3b3170d55" containerName="glance-httpd" containerID="cri-o://cfce32c5641fa74da2bec51a639cf4b543a742d56f2244e815adadb8b45a0fa8" gracePeriod=30 Dec 01 06:59:07 crc kubenswrapper[4632]: I1201 06:59:07.165693 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6969855-7e1c-46bd-8fc4-b168485224aa","Type":"ContainerStarted","Data":"9e8dfd17e34da469283f6bcfc9d7bb74c969e95d13f639bfcd2fb34c371f94fb"} Dec 01 06:59:07 crc kubenswrapper[4632]: I1201 06:59:07.167758 4632 generic.go:334] "Generic (PLEG): container finished" podID="c6717d9d-3b42-4b81-a25d-84d3b3170d55" containerID="dcb414ea911d5d042dec3aaf925d0c26f7813694cdb064768910c2df3910c5c6" exitCode=143 Dec 01 06:59:07 crc kubenswrapper[4632]: I1201 06:59:07.167803 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c6717d9d-3b42-4b81-a25d-84d3b3170d55","Type":"ContainerDied","Data":"dcb414ea911d5d042dec3aaf925d0c26f7813694cdb064768910c2df3910c5c6"} Dec 01 06:59:08 crc kubenswrapper[4632]: I1201 06:59:08.190558 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6969855-7e1c-46bd-8fc4-b168485224aa","Type":"ContainerStarted","Data":"f5e5d924b12c5d504fbab9bf07ff94b1cb2a19d4511adbd34f4d260293f28748"} Dec 01 06:59:08 crc kubenswrapper[4632]: I1201 06:59:08.190891 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 06:59:08 crc kubenswrapper[4632]: I1201 06:59:08.213749 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=4.6948925379999995 podStartE2EDuration="8.213727498s" podCreationTimestamp="2025-12-01 06:59:00 +0000 UTC" firstStartedPulling="2025-12-01 06:59:04.449107045 +0000 UTC m=+954.014120019" lastFinishedPulling="2025-12-01 06:59:07.967942006 +0000 UTC m=+957.532954979" observedRunningTime="2025-12-01 
06:59:08.206177816 +0000 UTC m=+957.771190789" watchObservedRunningTime="2025-12-01 06:59:08.213727498 +0000 UTC m=+957.778740471" Dec 01 06:59:09 crc kubenswrapper[4632]: I1201 06:59:09.041966 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 06:59:09 crc kubenswrapper[4632]: I1201 06:59:09.343706 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 06:59:09 crc kubenswrapper[4632]: I1201 06:59:09.343928 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="d1206021-ebee-4c1a-a195-a0ca8c95b324" containerName="glance-log" containerID="cri-o://fa16f77563add50962a4ef17b14ff84b53af168d5477e35bf67677aeeb1f0a5b" gracePeriod=30 Dec 01 06:59:09 crc kubenswrapper[4632]: I1201 06:59:09.344009 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="d1206021-ebee-4c1a-a195-a0ca8c95b324" containerName="glance-httpd" containerID="cri-o://c91dc82b748ed57a47fdbcefeffcc323609eb54388ac200cd40b8207c5544564" gracePeriod=30 Dec 01 06:59:09 crc kubenswrapper[4632]: I1201 06:59:09.935132 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 06:59:09 crc kubenswrapper[4632]: I1201 06:59:09.969412 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6717d9d-3b42-4b81-a25d-84d3b3170d55-combined-ca-bundle\") pod \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " Dec 01 06:59:09 crc kubenswrapper[4632]: I1201 06:59:09.969540 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6717d9d-3b42-4b81-a25d-84d3b3170d55-logs\") pod \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " Dec 01 06:59:09 crc kubenswrapper[4632]: I1201 06:59:09.969581 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4w9lr\" (UniqueName: \"kubernetes.io/projected/c6717d9d-3b42-4b81-a25d-84d3b3170d55-kube-api-access-4w9lr\") pod \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " Dec 01 06:59:09 crc kubenswrapper[4632]: I1201 06:59:09.969613 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " Dec 01 06:59:09 crc kubenswrapper[4632]: I1201 06:59:09.969817 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c6717d9d-3b42-4b81-a25d-84d3b3170d55-httpd-run\") pod \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " Dec 01 06:59:09 crc kubenswrapper[4632]: I1201 06:59:09.969868 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6717d9d-3b42-4b81-a25d-84d3b3170d55-public-tls-certs\") pod \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " Dec 01 06:59:09 crc kubenswrapper[4632]: I1201 06:59:09.969919 4632 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6717d9d-3b42-4b81-a25d-84d3b3170d55-scripts\") pod \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " Dec 01 06:59:09 crc kubenswrapper[4632]: I1201 06:59:09.970077 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6717d9d-3b42-4b81-a25d-84d3b3170d55-config-data\") pod \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\" (UID: \"c6717d9d-3b42-4b81-a25d-84d3b3170d55\") " Dec 01 06:59:09 crc kubenswrapper[4632]: I1201 06:59:09.970103 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6717d9d-3b42-4b81-a25d-84d3b3170d55-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "c6717d9d-3b42-4b81-a25d-84d3b3170d55" (UID: "c6717d9d-3b42-4b81-a25d-84d3b3170d55"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:59:09 crc kubenswrapper[4632]: I1201 06:59:09.970857 4632 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c6717d9d-3b42-4b81-a25d-84d3b3170d55-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:09 crc kubenswrapper[4632]: I1201 06:59:09.970955 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6717d9d-3b42-4b81-a25d-84d3b3170d55-logs" (OuterVolumeSpecName: "logs") pod "c6717d9d-3b42-4b81-a25d-84d3b3170d55" (UID: "c6717d9d-3b42-4b81-a25d-84d3b3170d55"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:59:09 crc kubenswrapper[4632]: I1201 06:59:09.975505 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6717d9d-3b42-4b81-a25d-84d3b3170d55-kube-api-access-4w9lr" (OuterVolumeSpecName: "kube-api-access-4w9lr") pod "c6717d9d-3b42-4b81-a25d-84d3b3170d55" (UID: "c6717d9d-3b42-4b81-a25d-84d3b3170d55"). InnerVolumeSpecName "kube-api-access-4w9lr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:59:09 crc kubenswrapper[4632]: I1201 06:59:09.977637 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "c6717d9d-3b42-4b81-a25d-84d3b3170d55" (UID: "c6717d9d-3b42-4b81-a25d-84d3b3170d55"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 06:59:09 crc kubenswrapper[4632]: I1201 06:59:09.977929 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6717d9d-3b42-4b81-a25d-84d3b3170d55-scripts" (OuterVolumeSpecName: "scripts") pod "c6717d9d-3b42-4b81-a25d-84d3b3170d55" (UID: "c6717d9d-3b42-4b81-a25d-84d3b3170d55"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.001852 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6717d9d-3b42-4b81-a25d-84d3b3170d55-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c6717d9d-3b42-4b81-a25d-84d3b3170d55" (UID: "c6717d9d-3b42-4b81-a25d-84d3b3170d55"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.032196 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6717d9d-3b42-4b81-a25d-84d3b3170d55-config-data" (OuterVolumeSpecName: "config-data") pod "c6717d9d-3b42-4b81-a25d-84d3b3170d55" (UID: "c6717d9d-3b42-4b81-a25d-84d3b3170d55"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.045479 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6717d9d-3b42-4b81-a25d-84d3b3170d55-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "c6717d9d-3b42-4b81-a25d-84d3b3170d55" (UID: "c6717d9d-3b42-4b81-a25d-84d3b3170d55"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.074007 4632 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6717d9d-3b42-4b81-a25d-84d3b3170d55-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.074126 4632 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6717d9d-3b42-4b81-a25d-84d3b3170d55-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.074147 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6717d9d-3b42-4b81-a25d-84d3b3170d55-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.074173 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6717d9d-3b42-4b81-a25d-84d3b3170d55-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.074185 4632 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6717d9d-3b42-4b81-a25d-84d3b3170d55-logs\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.074197 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4w9lr\" (UniqueName: \"kubernetes.io/projected/c6717d9d-3b42-4b81-a25d-84d3b3170d55-kube-api-access-4w9lr\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.074246 4632 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.094316 4632 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.176714 4632 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.215933 4632 generic.go:334] "Generic (PLEG): container finished" podID="c6717d9d-3b42-4b81-a25d-84d3b3170d55" containerID="cfce32c5641fa74da2bec51a639cf4b543a742d56f2244e815adadb8b45a0fa8" exitCode=0 Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.216038 4632 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.216033 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c6717d9d-3b42-4b81-a25d-84d3b3170d55","Type":"ContainerDied","Data":"cfce32c5641fa74da2bec51a639cf4b543a742d56f2244e815adadb8b45a0fa8"} Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.216112 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c6717d9d-3b42-4b81-a25d-84d3b3170d55","Type":"ContainerDied","Data":"57b5ed92ac9d2c09906de429fcc9b8060558073959e559501eb7399a716c5e36"} Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.216140 4632 scope.go:117] "RemoveContainer" containerID="cfce32c5641fa74da2bec51a639cf4b543a742d56f2244e815adadb8b45a0fa8" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.219799 4632 generic.go:334] "Generic (PLEG): container finished" podID="d1206021-ebee-4c1a-a195-a0ca8c95b324" containerID="fa16f77563add50962a4ef17b14ff84b53af168d5477e35bf67677aeeb1f0a5b" exitCode=143 Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.220108 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e6969855-7e1c-46bd-8fc4-b168485224aa" containerName="ceilometer-central-agent" containerID="cri-o://85ac04cd95f63cb4ec3ada54760d7a6bb7108a51da7a5605ca1af598c2d496c3" gracePeriod=30 Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.220453 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e6969855-7e1c-46bd-8fc4-b168485224aa" containerName="proxy-httpd" containerID="cri-o://f5e5d924b12c5d504fbab9bf07ff94b1cb2a19d4511adbd34f4d260293f28748" gracePeriod=30 Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.220480 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d1206021-ebee-4c1a-a195-a0ca8c95b324","Type":"ContainerDied","Data":"fa16f77563add50962a4ef17b14ff84b53af168d5477e35bf67677aeeb1f0a5b"} Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.220518 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e6969855-7e1c-46bd-8fc4-b168485224aa" containerName="sg-core" containerID="cri-o://9e8dfd17e34da469283f6bcfc9d7bb74c969e95d13f639bfcd2fb34c371f94fb" gracePeriod=30 Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.220559 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e6969855-7e1c-46bd-8fc4-b168485224aa" containerName="ceilometer-notification-agent" containerID="cri-o://79413fae02c42ae2c99729487f26e80baa83b0eb11d500f14b20afbce1a1e886" gracePeriod=30 Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.245973 4632 scope.go:117] "RemoveContainer" containerID="dcb414ea911d5d042dec3aaf925d0c26f7813694cdb064768910c2df3910c5c6" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.265830 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.269534 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.272737 4632 scope.go:117] "RemoveContainer" 
containerID="cfce32c5641fa74da2bec51a639cf4b543a742d56f2244e815adadb8b45a0fa8" Dec 01 06:59:10 crc kubenswrapper[4632]: E1201 06:59:10.273470 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cfce32c5641fa74da2bec51a639cf4b543a742d56f2244e815adadb8b45a0fa8\": container with ID starting with cfce32c5641fa74da2bec51a639cf4b543a742d56f2244e815adadb8b45a0fa8 not found: ID does not exist" containerID="cfce32c5641fa74da2bec51a639cf4b543a742d56f2244e815adadb8b45a0fa8" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.273501 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfce32c5641fa74da2bec51a639cf4b543a742d56f2244e815adadb8b45a0fa8"} err="failed to get container status \"cfce32c5641fa74da2bec51a639cf4b543a742d56f2244e815adadb8b45a0fa8\": rpc error: code = NotFound desc = could not find container \"cfce32c5641fa74da2bec51a639cf4b543a742d56f2244e815adadb8b45a0fa8\": container with ID starting with cfce32c5641fa74da2bec51a639cf4b543a742d56f2244e815adadb8b45a0fa8 not found: ID does not exist" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.273520 4632 scope.go:117] "RemoveContainer" containerID="dcb414ea911d5d042dec3aaf925d0c26f7813694cdb064768910c2df3910c5c6" Dec 01 06:59:10 crc kubenswrapper[4632]: E1201 06:59:10.273769 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dcb414ea911d5d042dec3aaf925d0c26f7813694cdb064768910c2df3910c5c6\": container with ID starting with dcb414ea911d5d042dec3aaf925d0c26f7813694cdb064768910c2df3910c5c6 not found: ID does not exist" containerID="dcb414ea911d5d042dec3aaf925d0c26f7813694cdb064768910c2df3910c5c6" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.273785 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcb414ea911d5d042dec3aaf925d0c26f7813694cdb064768910c2df3910c5c6"} err="failed to get container status \"dcb414ea911d5d042dec3aaf925d0c26f7813694cdb064768910c2df3910c5c6\": rpc error: code = NotFound desc = could not find container \"dcb414ea911d5d042dec3aaf925d0c26f7813694cdb064768910c2df3910c5c6\": container with ID starting with dcb414ea911d5d042dec3aaf925d0c26f7813694cdb064768910c2df3910c5c6 not found: ID does not exist" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.285491 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 06:59:10 crc kubenswrapper[4632]: E1201 06:59:10.285899 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6717d9d-3b42-4b81-a25d-84d3b3170d55" containerName="glance-httpd" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.285920 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6717d9d-3b42-4b81-a25d-84d3b3170d55" containerName="glance-httpd" Dec 01 06:59:10 crc kubenswrapper[4632]: E1201 06:59:10.285935 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6717d9d-3b42-4b81-a25d-84d3b3170d55" containerName="glance-log" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.285942 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6717d9d-3b42-4b81-a25d-84d3b3170d55" containerName="glance-log" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.286171 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6717d9d-3b42-4b81-a25d-84d3b3170d55" containerName="glance-httpd" Dec 01 06:59:10 crc kubenswrapper[4632]: 
I1201 06:59:10.286205 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6717d9d-3b42-4b81-a25d-84d3b3170d55" containerName="glance-log" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.287194 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.288950 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.289162 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.302639 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.384282 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"e8ac0c6b-4bf6-4259-bad6-9c0620047334\") " pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.384598 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8nfm\" (UniqueName: \"kubernetes.io/projected/e8ac0c6b-4bf6-4259-bad6-9c0620047334-kube-api-access-f8nfm\") pod \"glance-default-external-api-0\" (UID: \"e8ac0c6b-4bf6-4259-bad6-9c0620047334\") " pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.384690 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8ac0c6b-4bf6-4259-bad6-9c0620047334-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e8ac0c6b-4bf6-4259-bad6-9c0620047334\") " pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.384714 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e8ac0c6b-4bf6-4259-bad6-9c0620047334-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e8ac0c6b-4bf6-4259-bad6-9c0620047334\") " pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.384796 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8ac0c6b-4bf6-4259-bad6-9c0620047334-config-data\") pod \"glance-default-external-api-0\" (UID: \"e8ac0c6b-4bf6-4259-bad6-9c0620047334\") " pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.384843 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e8ac0c6b-4bf6-4259-bad6-9c0620047334-scripts\") pod \"glance-default-external-api-0\" (UID: \"e8ac0c6b-4bf6-4259-bad6-9c0620047334\") " pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.384899 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e8ac0c6b-4bf6-4259-bad6-9c0620047334-logs\") pod 
\"glance-default-external-api-0\" (UID: \"e8ac0c6b-4bf6-4259-bad6-9c0620047334\") " pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.384943 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8ac0c6b-4bf6-4259-bad6-9c0620047334-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e8ac0c6b-4bf6-4259-bad6-9c0620047334\") " pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.487429 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8ac0c6b-4bf6-4259-bad6-9c0620047334-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e8ac0c6b-4bf6-4259-bad6-9c0620047334\") " pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.487632 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"e8ac0c6b-4bf6-4259-bad6-9c0620047334\") " pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.487666 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8nfm\" (UniqueName: \"kubernetes.io/projected/e8ac0c6b-4bf6-4259-bad6-9c0620047334-kube-api-access-f8nfm\") pod \"glance-default-external-api-0\" (UID: \"e8ac0c6b-4bf6-4259-bad6-9c0620047334\") " pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.487753 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8ac0c6b-4bf6-4259-bad6-9c0620047334-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e8ac0c6b-4bf6-4259-bad6-9c0620047334\") " pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.487781 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e8ac0c6b-4bf6-4259-bad6-9c0620047334-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e8ac0c6b-4bf6-4259-bad6-9c0620047334\") " pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.487886 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8ac0c6b-4bf6-4259-bad6-9c0620047334-config-data\") pod \"glance-default-external-api-0\" (UID: \"e8ac0c6b-4bf6-4259-bad6-9c0620047334\") " pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.487953 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e8ac0c6b-4bf6-4259-bad6-9c0620047334-scripts\") pod \"glance-default-external-api-0\" (UID: \"e8ac0c6b-4bf6-4259-bad6-9c0620047334\") " pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.488018 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e8ac0c6b-4bf6-4259-bad6-9c0620047334-logs\") pod \"glance-default-external-api-0\" (UID: 
\"e8ac0c6b-4bf6-4259-bad6-9c0620047334\") " pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.488489 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e8ac0c6b-4bf6-4259-bad6-9c0620047334-logs\") pod \"glance-default-external-api-0\" (UID: \"e8ac0c6b-4bf6-4259-bad6-9c0620047334\") " pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.488717 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e8ac0c6b-4bf6-4259-bad6-9c0620047334-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e8ac0c6b-4bf6-4259-bad6-9c0620047334\") " pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.489198 4632 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"e8ac0c6b-4bf6-4259-bad6-9c0620047334\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.492972 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8ac0c6b-4bf6-4259-bad6-9c0620047334-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e8ac0c6b-4bf6-4259-bad6-9c0620047334\") " pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.493090 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8ac0c6b-4bf6-4259-bad6-9c0620047334-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e8ac0c6b-4bf6-4259-bad6-9c0620047334\") " pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.494673 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e8ac0c6b-4bf6-4259-bad6-9c0620047334-config-data\") pod \"glance-default-external-api-0\" (UID: \"e8ac0c6b-4bf6-4259-bad6-9c0620047334\") " pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.497925 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e8ac0c6b-4bf6-4259-bad6-9c0620047334-scripts\") pod \"glance-default-external-api-0\" (UID: \"e8ac0c6b-4bf6-4259-bad6-9c0620047334\") " pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.506116 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8nfm\" (UniqueName: \"kubernetes.io/projected/e8ac0c6b-4bf6-4259-bad6-9c0620047334-kube-api-access-f8nfm\") pod \"glance-default-external-api-0\" (UID: \"e8ac0c6b-4bf6-4259-bad6-9c0620047334\") " pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.514103 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"e8ac0c6b-4bf6-4259-bad6-9c0620047334\") " pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 
06:59:10.612393 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 01 06:59:10 crc kubenswrapper[4632]: I1201 06:59:10.779423 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6717d9d-3b42-4b81-a25d-84d3b3170d55" path="/var/lib/kubelet/pods/c6717d9d-3b42-4b81-a25d-84d3b3170d55/volumes" Dec 01 06:59:11 crc kubenswrapper[4632]: I1201 06:59:11.102878 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 01 06:59:11 crc kubenswrapper[4632]: I1201 06:59:11.232379 4632 generic.go:334] "Generic (PLEG): container finished" podID="e6969855-7e1c-46bd-8fc4-b168485224aa" containerID="f5e5d924b12c5d504fbab9bf07ff94b1cb2a19d4511adbd34f4d260293f28748" exitCode=0 Dec 01 06:59:11 crc kubenswrapper[4632]: I1201 06:59:11.232419 4632 generic.go:334] "Generic (PLEG): container finished" podID="e6969855-7e1c-46bd-8fc4-b168485224aa" containerID="9e8dfd17e34da469283f6bcfc9d7bb74c969e95d13f639bfcd2fb34c371f94fb" exitCode=2 Dec 01 06:59:11 crc kubenswrapper[4632]: I1201 06:59:11.232412 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6969855-7e1c-46bd-8fc4-b168485224aa","Type":"ContainerDied","Data":"f5e5d924b12c5d504fbab9bf07ff94b1cb2a19d4511adbd34f4d260293f28748"} Dec 01 06:59:11 crc kubenswrapper[4632]: I1201 06:59:11.232457 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6969855-7e1c-46bd-8fc4-b168485224aa","Type":"ContainerDied","Data":"9e8dfd17e34da469283f6bcfc9d7bb74c969e95d13f639bfcd2fb34c371f94fb"} Dec 01 06:59:11 crc kubenswrapper[4632]: I1201 06:59:11.232472 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6969855-7e1c-46bd-8fc4-b168485224aa","Type":"ContainerDied","Data":"79413fae02c42ae2c99729487f26e80baa83b0eb11d500f14b20afbce1a1e886"} Dec 01 06:59:11 crc kubenswrapper[4632]: I1201 06:59:11.232430 4632 generic.go:334] "Generic (PLEG): container finished" podID="e6969855-7e1c-46bd-8fc4-b168485224aa" containerID="79413fae02c42ae2c99729487f26e80baa83b0eb11d500f14b20afbce1a1e886" exitCode=0 Dec 01 06:59:11 crc kubenswrapper[4632]: I1201 06:59:11.233830 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e8ac0c6b-4bf6-4259-bad6-9c0620047334","Type":"ContainerStarted","Data":"2169c2946f70f7db2d50a74cb107cf8fbcea23ad8905cf1adcedcb86c09c5377"} Dec 01 06:59:11 crc kubenswrapper[4632]: I1201 06:59:11.236037 4632 generic.go:334] "Generic (PLEG): container finished" podID="350c6392-bd06-44ec-98a2-edd392d66bbf" containerID="6c0b0b9a6d884c4991fe39e3e49bfae5d3aefa4b3f576850703f44fb9cad48c4" exitCode=0 Dec 01 06:59:11 crc kubenswrapper[4632]: I1201 06:59:11.236063 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-749r7" event={"ID":"350c6392-bd06-44ec-98a2-edd392d66bbf","Type":"ContainerDied","Data":"6c0b0b9a6d884c4991fe39e3e49bfae5d3aefa4b3f576850703f44fb9cad48c4"} Dec 01 06:59:12 crc kubenswrapper[4632]: I1201 06:59:12.250689 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e8ac0c6b-4bf6-4259-bad6-9c0620047334","Type":"ContainerStarted","Data":"5d79565f76df42941880f08ef456f7491fca515767c884b37f96fb5fa1faa5a1"} Dec 01 06:59:12 crc kubenswrapper[4632]: I1201 06:59:12.251483 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-default-external-api-0" event={"ID":"e8ac0c6b-4bf6-4259-bad6-9c0620047334","Type":"ContainerStarted","Data":"626631563a002582ad225294b7ad5a3b0b67d76067e39d989d5ff43fdf274af8"} Dec 01 06:59:12 crc kubenswrapper[4632]: I1201 06:59:12.284107 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=2.284075121 podStartE2EDuration="2.284075121s" podCreationTimestamp="2025-12-01 06:59:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:59:12.274995132 +0000 UTC m=+961.840008105" watchObservedRunningTime="2025-12-01 06:59:12.284075121 +0000 UTC m=+961.849088104" Dec 01 06:59:12 crc kubenswrapper[4632]: I1201 06:59:12.677189 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-749r7" Dec 01 06:59:12 crc kubenswrapper[4632]: I1201 06:59:12.730397 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/350c6392-bd06-44ec-98a2-edd392d66bbf-combined-ca-bundle\") pod \"350c6392-bd06-44ec-98a2-edd392d66bbf\" (UID: \"350c6392-bd06-44ec-98a2-edd392d66bbf\") " Dec 01 06:59:12 crc kubenswrapper[4632]: I1201 06:59:12.730510 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/350c6392-bd06-44ec-98a2-edd392d66bbf-config-data\") pod \"350c6392-bd06-44ec-98a2-edd392d66bbf\" (UID: \"350c6392-bd06-44ec-98a2-edd392d66bbf\") " Dec 01 06:59:12 crc kubenswrapper[4632]: I1201 06:59:12.730574 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/350c6392-bd06-44ec-98a2-edd392d66bbf-scripts\") pod \"350c6392-bd06-44ec-98a2-edd392d66bbf\" (UID: \"350c6392-bd06-44ec-98a2-edd392d66bbf\") " Dec 01 06:59:12 crc kubenswrapper[4632]: I1201 06:59:12.730681 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6r2l\" (UniqueName: \"kubernetes.io/projected/350c6392-bd06-44ec-98a2-edd392d66bbf-kube-api-access-f6r2l\") pod \"350c6392-bd06-44ec-98a2-edd392d66bbf\" (UID: \"350c6392-bd06-44ec-98a2-edd392d66bbf\") " Dec 01 06:59:12 crc kubenswrapper[4632]: I1201 06:59:12.738714 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/350c6392-bd06-44ec-98a2-edd392d66bbf-scripts" (OuterVolumeSpecName: "scripts") pod "350c6392-bd06-44ec-98a2-edd392d66bbf" (UID: "350c6392-bd06-44ec-98a2-edd392d66bbf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:12 crc kubenswrapper[4632]: I1201 06:59:12.741475 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/350c6392-bd06-44ec-98a2-edd392d66bbf-kube-api-access-f6r2l" (OuterVolumeSpecName: "kube-api-access-f6r2l") pod "350c6392-bd06-44ec-98a2-edd392d66bbf" (UID: "350c6392-bd06-44ec-98a2-edd392d66bbf"). InnerVolumeSpecName "kube-api-access-f6r2l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:59:12 crc kubenswrapper[4632]: I1201 06:59:12.759671 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/350c6392-bd06-44ec-98a2-edd392d66bbf-config-data" (OuterVolumeSpecName: "config-data") pod "350c6392-bd06-44ec-98a2-edd392d66bbf" (UID: "350c6392-bd06-44ec-98a2-edd392d66bbf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:12 crc kubenswrapper[4632]: I1201 06:59:12.764259 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/350c6392-bd06-44ec-98a2-edd392d66bbf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "350c6392-bd06-44ec-98a2-edd392d66bbf" (UID: "350c6392-bd06-44ec-98a2-edd392d66bbf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:12 crc kubenswrapper[4632]: I1201 06:59:12.834384 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/350c6392-bd06-44ec-98a2-edd392d66bbf-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:12 crc kubenswrapper[4632]: I1201 06:59:12.834587 4632 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/350c6392-bd06-44ec-98a2-edd392d66bbf-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:12 crc kubenswrapper[4632]: I1201 06:59:12.834612 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6r2l\" (UniqueName: \"kubernetes.io/projected/350c6392-bd06-44ec-98a2-edd392d66bbf-kube-api-access-f6r2l\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:12 crc kubenswrapper[4632]: I1201 06:59:12.834624 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/350c6392-bd06-44ec-98a2-edd392d66bbf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:12 crc kubenswrapper[4632]: I1201 06:59:12.890206 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.056227 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1206021-ebee-4c1a-a195-a0ca8c95b324-config-data\") pod \"d1206021-ebee-4c1a-a195-a0ca8c95b324\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.056304 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1206021-ebee-4c1a-a195-a0ca8c95b324-scripts\") pod \"d1206021-ebee-4c1a-a195-a0ca8c95b324\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.056401 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d1206021-ebee-4c1a-a195-a0ca8c95b324-httpd-run\") pod \"d1206021-ebee-4c1a-a195-a0ca8c95b324\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.056428 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1206021-ebee-4c1a-a195-a0ca8c95b324-combined-ca-bundle\") pod \"d1206021-ebee-4c1a-a195-a0ca8c95b324\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.057171 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1206021-ebee-4c1a-a195-a0ca8c95b324-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "d1206021-ebee-4c1a-a195-a0ca8c95b324" (UID: "d1206021-ebee-4c1a-a195-a0ca8c95b324"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.057281 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d1206021-ebee-4c1a-a195-a0ca8c95b324-logs\") pod \"d1206021-ebee-4c1a-a195-a0ca8c95b324\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.057401 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d1206021-ebee-4c1a-a195-a0ca8c95b324-internal-tls-certs\") pod \"d1206021-ebee-4c1a-a195-a0ca8c95b324\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.057685 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1206021-ebee-4c1a-a195-a0ca8c95b324-logs" (OuterVolumeSpecName: "logs") pod "d1206021-ebee-4c1a-a195-a0ca8c95b324" (UID: "d1206021-ebee-4c1a-a195-a0ca8c95b324"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.057763 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-czgm8\" (UniqueName: \"kubernetes.io/projected/d1206021-ebee-4c1a-a195-a0ca8c95b324-kube-api-access-czgm8\") pod \"d1206021-ebee-4c1a-a195-a0ca8c95b324\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.057808 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"d1206021-ebee-4c1a-a195-a0ca8c95b324\" (UID: \"d1206021-ebee-4c1a-a195-a0ca8c95b324\") " Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.058104 4632 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d1206021-ebee-4c1a-a195-a0ca8c95b324-logs\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.058125 4632 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d1206021-ebee-4c1a-a195-a0ca8c95b324-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.061208 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1206021-ebee-4c1a-a195-a0ca8c95b324-scripts" (OuterVolumeSpecName: "scripts") pod "d1206021-ebee-4c1a-a195-a0ca8c95b324" (UID: "d1206021-ebee-4c1a-a195-a0ca8c95b324"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.063146 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "glance") pod "d1206021-ebee-4c1a-a195-a0ca8c95b324" (UID: "d1206021-ebee-4c1a-a195-a0ca8c95b324"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.063292 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1206021-ebee-4c1a-a195-a0ca8c95b324-kube-api-access-czgm8" (OuterVolumeSpecName: "kube-api-access-czgm8") pod "d1206021-ebee-4c1a-a195-a0ca8c95b324" (UID: "d1206021-ebee-4c1a-a195-a0ca8c95b324"). InnerVolumeSpecName "kube-api-access-czgm8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.082505 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1206021-ebee-4c1a-a195-a0ca8c95b324-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d1206021-ebee-4c1a-a195-a0ca8c95b324" (UID: "d1206021-ebee-4c1a-a195-a0ca8c95b324"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.103348 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1206021-ebee-4c1a-a195-a0ca8c95b324-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "d1206021-ebee-4c1a-a195-a0ca8c95b324" (UID: "d1206021-ebee-4c1a-a195-a0ca8c95b324"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.109374 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1206021-ebee-4c1a-a195-a0ca8c95b324-config-data" (OuterVolumeSpecName: "config-data") pod "d1206021-ebee-4c1a-a195-a0ca8c95b324" (UID: "d1206021-ebee-4c1a-a195-a0ca8c95b324"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.159393 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1206021-ebee-4c1a-a195-a0ca8c95b324-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.159422 4632 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1206021-ebee-4c1a-a195-a0ca8c95b324-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.159431 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1206021-ebee-4c1a-a195-a0ca8c95b324-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.159442 4632 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d1206021-ebee-4c1a-a195-a0ca8c95b324-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.159451 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-czgm8\" (UniqueName: \"kubernetes.io/projected/d1206021-ebee-4c1a-a195-a0ca8c95b324-kube-api-access-czgm8\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.159481 4632 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.177608 4632 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.262077 4632 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.267029 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-749r7" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.267033 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-749r7" event={"ID":"350c6392-bd06-44ec-98a2-edd392d66bbf","Type":"ContainerDied","Data":"5bbc46348e77afcf23cddac4e53e1e348c93ba27cc110935a53e0d71c6b1b916"} Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.267101 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5bbc46348e77afcf23cddac4e53e1e348c93ba27cc110935a53e0d71c6b1b916" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.270688 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d1206021-ebee-4c1a-a195-a0ca8c95b324","Type":"ContainerDied","Data":"c91dc82b748ed57a47fdbcefeffcc323609eb54388ac200cd40b8207c5544564"} Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.270749 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.270796 4632 scope.go:117] "RemoveContainer" containerID="c91dc82b748ed57a47fdbcefeffcc323609eb54388ac200cd40b8207c5544564" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.270619 4632 generic.go:334] "Generic (PLEG): container finished" podID="d1206021-ebee-4c1a-a195-a0ca8c95b324" containerID="c91dc82b748ed57a47fdbcefeffcc323609eb54388ac200cd40b8207c5544564" exitCode=0 Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.271114 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d1206021-ebee-4c1a-a195-a0ca8c95b324","Type":"ContainerDied","Data":"1947181e6684674fd6e975c6f6b88a0ba9ba484a7872c2263cf7152e6f4d8fca"} Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.305286 4632 scope.go:117] "RemoveContainer" containerID="fa16f77563add50962a4ef17b14ff84b53af168d5477e35bf67677aeeb1f0a5b" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.325975 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.345926 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.354489 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 06:59:13 crc kubenswrapper[4632]: E1201 06:59:13.354982 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="350c6392-bd06-44ec-98a2-edd392d66bbf" containerName="nova-cell0-conductor-db-sync" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.355002 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="350c6392-bd06-44ec-98a2-edd392d66bbf" containerName="nova-cell0-conductor-db-sync" Dec 01 06:59:13 crc kubenswrapper[4632]: E1201 06:59:13.355022 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1206021-ebee-4c1a-a195-a0ca8c95b324" containerName="glance-httpd" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.355030 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1206021-ebee-4c1a-a195-a0ca8c95b324" containerName="glance-httpd" Dec 01 06:59:13 crc kubenswrapper[4632]: E1201 06:59:13.355042 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1206021-ebee-4c1a-a195-a0ca8c95b324" containerName="glance-log" Dec 
01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.355048 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1206021-ebee-4c1a-a195-a0ca8c95b324" containerName="glance-log" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.355254 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1206021-ebee-4c1a-a195-a0ca8c95b324" containerName="glance-httpd" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.355292 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1206021-ebee-4c1a-a195-a0ca8c95b324" containerName="glance-log" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.355304 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="350c6392-bd06-44ec-98a2-edd392d66bbf" containerName="nova-cell0-conductor-db-sync" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.356343 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.358637 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.358719 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.370323 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.371172 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.375729 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-25mjt" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.376049 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.381784 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.382152 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.386956 4632 scope.go:117] "RemoveContainer" containerID="c91dc82b748ed57a47fdbcefeffcc323609eb54388ac200cd40b8207c5544564" Dec 01 06:59:13 crc kubenswrapper[4632]: E1201 06:59:13.387785 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c91dc82b748ed57a47fdbcefeffcc323609eb54388ac200cd40b8207c5544564\": container with ID starting with c91dc82b748ed57a47fdbcefeffcc323609eb54388ac200cd40b8207c5544564 not found: ID does not exist" containerID="c91dc82b748ed57a47fdbcefeffcc323609eb54388ac200cd40b8207c5544564" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.387820 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c91dc82b748ed57a47fdbcefeffcc323609eb54388ac200cd40b8207c5544564"} err="failed to get container status \"c91dc82b748ed57a47fdbcefeffcc323609eb54388ac200cd40b8207c5544564\": rpc error: code = NotFound desc = could not find container \"c91dc82b748ed57a47fdbcefeffcc323609eb54388ac200cd40b8207c5544564\": container with ID starting with c91dc82b748ed57a47fdbcefeffcc323609eb54388ac200cd40b8207c5544564 not 
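When the replacement pod is admitted, the CPU and memory managers drop assignments still recorded for containers of the deleted pod UIDs ("RemoveStaleState" / "Deleted CPUSet assignment" above). A minimal sketch of that bookkeeping, with invented types rather than kubelet's real cpu_manager state:

// Illustrative only: assignments and removeStaleState are made-up names.
package main

import "fmt"

type podUID = string

// podUID -> containerName -> assignment, with the cpuset reduced to a
// plain string for the sketch.
type assignments map[podUID]map[string]string

func (a assignments) removeStaleState(active map[podUID]bool) {
	for uid, containers := range a {
		if active[uid] {
			continue // pod is still admitted; keep its state
		}
		for name := range containers {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", uid, name)
			delete(containers, name)
		}
		delete(a, uid)
	}
}

func main() {
	st := assignments{
		"d1206021-ebee-4c1a-a195-a0ca8c95b324": {"glance-httpd": "0-3", "glance-log": "0-3"},
	}
	st.removeStaleState(map[podUID]bool{"3524161d-d124-4b04-9b25-73e2e3188c7f": true})
}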
found: ID does not exist" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.387847 4632 scope.go:117] "RemoveContainer" containerID="fa16f77563add50962a4ef17b14ff84b53af168d5477e35bf67677aeeb1f0a5b" Dec 01 06:59:13 crc kubenswrapper[4632]: E1201 06:59:13.389315 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa16f77563add50962a4ef17b14ff84b53af168d5477e35bf67677aeeb1f0a5b\": container with ID starting with fa16f77563add50962a4ef17b14ff84b53af168d5477e35bf67677aeeb1f0a5b not found: ID does not exist" containerID="fa16f77563add50962a4ef17b14ff84b53af168d5477e35bf67677aeeb1f0a5b" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.389344 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa16f77563add50962a4ef17b14ff84b53af168d5477e35bf67677aeeb1f0a5b"} err="failed to get container status \"fa16f77563add50962a4ef17b14ff84b53af168d5477e35bf67677aeeb1f0a5b\": rpc error: code = NotFound desc = could not find container \"fa16f77563add50962a4ef17b14ff84b53af168d5477e35bf67677aeeb1f0a5b\": container with ID starting with fa16f77563add50962a4ef17b14ff84b53af168d5477e35bf67677aeeb1f0a5b not found: ID does not exist" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.465555 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3524161d-d124-4b04-9b25-73e2e3188c7f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3524161d-d124-4b04-9b25-73e2e3188c7f\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.465651 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3524161d-d124-4b04-9b25-73e2e3188c7f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3524161d-d124-4b04-9b25-73e2e3188c7f\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.465717 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"3524161d-d124-4b04-9b25-73e2e3188c7f\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.465747 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3524161d-d124-4b04-9b25-73e2e3188c7f-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"3524161d-d124-4b04-9b25-73e2e3188c7f\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.465875 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lq6qj\" (UniqueName: \"kubernetes.io/projected/3524161d-d124-4b04-9b25-73e2e3188c7f-kube-api-access-lq6qj\") pod \"glance-default-internal-api-0\" (UID: \"3524161d-d124-4b04-9b25-73e2e3188c7f\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.465905 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/3524161d-d124-4b04-9b25-73e2e3188c7f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3524161d-d124-4b04-9b25-73e2e3188c7f\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.466028 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3524161d-d124-4b04-9b25-73e2e3188c7f-logs\") pod \"glance-default-internal-api-0\" (UID: \"3524161d-d124-4b04-9b25-73e2e3188c7f\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.466060 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3524161d-d124-4b04-9b25-73e2e3188c7f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3524161d-d124-4b04-9b25-73e2e3188c7f\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.568238 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cdd84ee-b1d9-4556-9c3d-f834d31430d6-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"4cdd84ee-b1d9-4556-9c3d-f834d31430d6\") " pod="openstack/nova-cell0-conductor-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.568294 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3524161d-d124-4b04-9b25-73e2e3188c7f-logs\") pod \"glance-default-internal-api-0\" (UID: \"3524161d-d124-4b04-9b25-73e2e3188c7f\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.568326 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3524161d-d124-4b04-9b25-73e2e3188c7f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3524161d-d124-4b04-9b25-73e2e3188c7f\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.568408 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3524161d-d124-4b04-9b25-73e2e3188c7f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3524161d-d124-4b04-9b25-73e2e3188c7f\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.568486 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3524161d-d124-4b04-9b25-73e2e3188c7f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3524161d-d124-4b04-9b25-73e2e3188c7f\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.568536 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"3524161d-d124-4b04-9b25-73e2e3188c7f\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.568561 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3524161d-d124-4b04-9b25-73e2e3188c7f-internal-tls-certs\") pod 
\"glance-default-internal-api-0\" (UID: \"3524161d-d124-4b04-9b25-73e2e3188c7f\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.568626 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cdd84ee-b1d9-4556-9c3d-f834d31430d6-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"4cdd84ee-b1d9-4556-9c3d-f834d31430d6\") " pod="openstack/nova-cell0-conductor-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.568659 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lq6qj\" (UniqueName: \"kubernetes.io/projected/3524161d-d124-4b04-9b25-73e2e3188c7f-kube-api-access-lq6qj\") pod \"glance-default-internal-api-0\" (UID: \"3524161d-d124-4b04-9b25-73e2e3188c7f\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.568690 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3524161d-d124-4b04-9b25-73e2e3188c7f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3524161d-d124-4b04-9b25-73e2e3188c7f\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.568707 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3524161d-d124-4b04-9b25-73e2e3188c7f-logs\") pod \"glance-default-internal-api-0\" (UID: \"3524161d-d124-4b04-9b25-73e2e3188c7f\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.568733 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgf9h\" (UniqueName: \"kubernetes.io/projected/4cdd84ee-b1d9-4556-9c3d-f834d31430d6-kube-api-access-qgf9h\") pod \"nova-cell0-conductor-0\" (UID: \"4cdd84ee-b1d9-4556-9c3d-f834d31430d6\") " pod="openstack/nova-cell0-conductor-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.568751 4632 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"3524161d-d124-4b04-9b25-73e2e3188c7f\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.569199 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3524161d-d124-4b04-9b25-73e2e3188c7f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3524161d-d124-4b04-9b25-73e2e3188c7f\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.573053 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3524161d-d124-4b04-9b25-73e2e3188c7f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3524161d-d124-4b04-9b25-73e2e3188c7f\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.574093 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3524161d-d124-4b04-9b25-73e2e3188c7f-internal-tls-certs\") pod \"glance-default-internal-api-0\" 
(UID: \"3524161d-d124-4b04-9b25-73e2e3188c7f\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.574187 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3524161d-d124-4b04-9b25-73e2e3188c7f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3524161d-d124-4b04-9b25-73e2e3188c7f\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.574534 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3524161d-d124-4b04-9b25-73e2e3188c7f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3524161d-d124-4b04-9b25-73e2e3188c7f\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.583224 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lq6qj\" (UniqueName: \"kubernetes.io/projected/3524161d-d124-4b04-9b25-73e2e3188c7f-kube-api-access-lq6qj\") pod \"glance-default-internal-api-0\" (UID: \"3524161d-d124-4b04-9b25-73e2e3188c7f\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.598983 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"glance-default-internal-api-0\" (UID: \"3524161d-d124-4b04-9b25-73e2e3188c7f\") " pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.670836 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cdd84ee-b1d9-4556-9c3d-f834d31430d6-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"4cdd84ee-b1d9-4556-9c3d-f834d31430d6\") " pod="openstack/nova-cell0-conductor-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.670950 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cdd84ee-b1d9-4556-9c3d-f834d31430d6-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"4cdd84ee-b1d9-4556-9c3d-f834d31430d6\") " pod="openstack/nova-cell0-conductor-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.670988 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgf9h\" (UniqueName: \"kubernetes.io/projected/4cdd84ee-b1d9-4556-9c3d-f834d31430d6-kube-api-access-qgf9h\") pod \"nova-cell0-conductor-0\" (UID: \"4cdd84ee-b1d9-4556-9c3d-f834d31430d6\") " pod="openstack/nova-cell0-conductor-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.674766 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cdd84ee-b1d9-4556-9c3d-f834d31430d6-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"4cdd84ee-b1d9-4556-9c3d-f834d31430d6\") " pod="openstack/nova-cell0-conductor-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.677245 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cdd84ee-b1d9-4556-9c3d-f834d31430d6-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"4cdd84ee-b1d9-4556-9c3d-f834d31430d6\") " pod="openstack/nova-cell0-conductor-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.684956 4632 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgf9h\" (UniqueName: \"kubernetes.io/projected/4cdd84ee-b1d9-4556-9c3d-f834d31430d6-kube-api-access-qgf9h\") pod \"nova-cell0-conductor-0\" (UID: \"4cdd84ee-b1d9-4556-9c3d-f834d31430d6\") " pod="openstack/nova-cell0-conductor-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.691863 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 01 06:59:13 crc kubenswrapper[4632]: I1201 06:59:13.714364 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 01 06:59:14 crc kubenswrapper[4632]: I1201 06:59:14.225446 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 01 06:59:14 crc kubenswrapper[4632]: I1201 06:59:14.296701 4632 generic.go:334] "Generic (PLEG): container finished" podID="e6969855-7e1c-46bd-8fc4-b168485224aa" containerID="85ac04cd95f63cb4ec3ada54760d7a6bb7108a51da7a5605ca1af598c2d496c3" exitCode=0 Dec 01 06:59:14 crc kubenswrapper[4632]: I1201 06:59:14.296767 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e6969855-7e1c-46bd-8fc4-b168485224aa","Type":"ContainerDied","Data":"85ac04cd95f63cb4ec3ada54760d7a6bb7108a51da7a5605ca1af598c2d496c3"} Dec 01 06:59:14 crc kubenswrapper[4632]: I1201 06:59:14.346190 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 01 06:59:14 crc kubenswrapper[4632]: W1201 06:59:14.354538 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3524161d_d124_4b04_9b25_73e2e3188c7f.slice/crio-29ac4ea409cd2c8f60edf2daa691d3ed10f859806684d671f7f2e8cc40670451 WatchSource:0}: Error finding container 29ac4ea409cd2c8f60edf2daa691d3ed10f859806684d671f7f2e8cc40670451: Status 404 returned error can't find the container with id 29ac4ea409cd2c8f60edf2daa691d3ed10f859806684d671f7f2e8cc40670451 Dec 01 06:59:14 crc kubenswrapper[4632]: I1201 06:59:14.759064 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1206021-ebee-4c1a-a195-a0ca8c95b324" path="/var/lib/kubelet/pods/d1206021-ebee-4c1a-a195-a0ca8c95b324/volumes" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.127947 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.209209 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6969855-7e1c-46bd-8fc4-b168485224aa-config-data\") pod \"e6969855-7e1c-46bd-8fc4-b168485224aa\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.209310 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6969855-7e1c-46bd-8fc4-b168485224aa-run-httpd\") pod \"e6969855-7e1c-46bd-8fc4-b168485224aa\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.209422 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6969855-7e1c-46bd-8fc4-b168485224aa-log-httpd\") pod \"e6969855-7e1c-46bd-8fc4-b168485224aa\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.209491 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e6969855-7e1c-46bd-8fc4-b168485224aa-sg-core-conf-yaml\") pod \"e6969855-7e1c-46bd-8fc4-b168485224aa\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.209514 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6969855-7e1c-46bd-8fc4-b168485224aa-combined-ca-bundle\") pod \"e6969855-7e1c-46bd-8fc4-b168485224aa\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.209534 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6969855-7e1c-46bd-8fc4-b168485224aa-scripts\") pod \"e6969855-7e1c-46bd-8fc4-b168485224aa\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.209554 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cq84m\" (UniqueName: \"kubernetes.io/projected/e6969855-7e1c-46bd-8fc4-b168485224aa-kube-api-access-cq84m\") pod \"e6969855-7e1c-46bd-8fc4-b168485224aa\" (UID: \"e6969855-7e1c-46bd-8fc4-b168485224aa\") " Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.210473 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6969855-7e1c-46bd-8fc4-b168485224aa-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e6969855-7e1c-46bd-8fc4-b168485224aa" (UID: "e6969855-7e1c-46bd-8fc4-b168485224aa"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.210625 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6969855-7e1c-46bd-8fc4-b168485224aa-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e6969855-7e1c-46bd-8fc4-b168485224aa" (UID: "e6969855-7e1c-46bd-8fc4-b168485224aa"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.217496 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6969855-7e1c-46bd-8fc4-b168485224aa-scripts" (OuterVolumeSpecName: "scripts") pod "e6969855-7e1c-46bd-8fc4-b168485224aa" (UID: "e6969855-7e1c-46bd-8fc4-b168485224aa"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.217515 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6969855-7e1c-46bd-8fc4-b168485224aa-kube-api-access-cq84m" (OuterVolumeSpecName: "kube-api-access-cq84m") pod "e6969855-7e1c-46bd-8fc4-b168485224aa" (UID: "e6969855-7e1c-46bd-8fc4-b168485224aa"). InnerVolumeSpecName "kube-api-access-cq84m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.237064 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6969855-7e1c-46bd-8fc4-b168485224aa-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e6969855-7e1c-46bd-8fc4-b168485224aa" (UID: "e6969855-7e1c-46bd-8fc4-b168485224aa"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.285058 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6969855-7e1c-46bd-8fc4-b168485224aa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e6969855-7e1c-46bd-8fc4-b168485224aa" (UID: "e6969855-7e1c-46bd-8fc4-b168485224aa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.308056 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6969855-7e1c-46bd-8fc4-b168485224aa-config-data" (OuterVolumeSpecName: "config-data") pod "e6969855-7e1c-46bd-8fc4-b168485224aa" (UID: "e6969855-7e1c-46bd-8fc4-b168485224aa"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.315341 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6969855-7e1c-46bd-8fc4-b168485224aa-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.315404 4632 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6969855-7e1c-46bd-8fc4-b168485224aa-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.315420 4632 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6969855-7e1c-46bd-8fc4-b168485224aa-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.315431 4632 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e6969855-7e1c-46bd-8fc4-b168485224aa-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.315443 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6969855-7e1c-46bd-8fc4-b168485224aa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.315454 4632 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6969855-7e1c-46bd-8fc4-b168485224aa-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.315465 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cq84m\" (UniqueName: \"kubernetes.io/projected/e6969855-7e1c-46bd-8fc4-b168485224aa-kube-api-access-cq84m\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.320213 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"4cdd84ee-b1d9-4556-9c3d-f834d31430d6","Type":"ContainerStarted","Data":"9271e9d907f381bba846247cff4189562b220144b55716aaf5a64ad30b48faaf"} Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.320290 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"4cdd84ee-b1d9-4556-9c3d-f834d31430d6","Type":"ContainerStarted","Data":"0b5b9ce5742c61000b9dc91dfb6283904a03d782a3ffa254d706153cc5012c4d"} Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.320555 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.324766 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3524161d-d124-4b04-9b25-73e2e3188c7f","Type":"ContainerStarted","Data":"e8d6c53d614be68d583961e7cc5ffb0758654b6c3ee2e6fb8cadd77d30f73240"} Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.325084 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3524161d-d124-4b04-9b25-73e2e3188c7f","Type":"ContainerStarted","Data":"29ac4ea409cd2c8f60edf2daa691d3ed10f859806684d671f7f2e8cc40670451"} Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.332951 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"e6969855-7e1c-46bd-8fc4-b168485224aa","Type":"ContainerDied","Data":"d6fa2e49cf60125a43bcb016a402844927cfefcfedda79642b98ed2f1f23dd80"} Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.332993 4632 scope.go:117] "RemoveContainer" containerID="f5e5d924b12c5d504fbab9bf07ff94b1cb2a19d4511adbd34f4d260293f28748" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.333175 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.354811 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.354789267 podStartE2EDuration="2.354789267s" podCreationTimestamp="2025-12-01 06:59:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:59:15.340983162 +0000 UTC m=+964.905996136" watchObservedRunningTime="2025-12-01 06:59:15.354789267 +0000 UTC m=+964.919802240" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.365762 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=2.365746608 podStartE2EDuration="2.365746608s" podCreationTimestamp="2025-12-01 06:59:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:59:15.359538737 +0000 UTC m=+964.924551710" watchObservedRunningTime="2025-12-01 06:59:15.365746608 +0000 UTC m=+964.930759581" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.367711 4632 scope.go:117] "RemoveContainer" containerID="9e8dfd17e34da469283f6bcfc9d7bb74c969e95d13f639bfcd2fb34c371f94fb" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.388003 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.389091 4632 scope.go:117] "RemoveContainer" containerID="79413fae02c42ae2c99729487f26e80baa83b0eb11d500f14b20afbce1a1e886" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.401318 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.413586 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 06:59:15 crc kubenswrapper[4632]: E1201 06:59:15.414083 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6969855-7e1c-46bd-8fc4-b168485224aa" containerName="ceilometer-central-agent" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.414107 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6969855-7e1c-46bd-8fc4-b168485224aa" containerName="ceilometer-central-agent" Dec 01 06:59:15 crc kubenswrapper[4632]: E1201 06:59:15.414129 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6969855-7e1c-46bd-8fc4-b168485224aa" containerName="ceilometer-notification-agent" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.414137 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6969855-7e1c-46bd-8fc4-b168485224aa" containerName="ceilometer-notification-agent" Dec 01 06:59:15 crc kubenswrapper[4632]: E1201 06:59:15.414936 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6969855-7e1c-46bd-8fc4-b168485224aa" containerName="proxy-httpd" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.414952 4632 
state_mem.go:107] "Deleted CPUSet assignment" podUID="e6969855-7e1c-46bd-8fc4-b168485224aa" containerName="proxy-httpd" Dec 01 06:59:15 crc kubenswrapper[4632]: E1201 06:59:15.414974 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6969855-7e1c-46bd-8fc4-b168485224aa" containerName="sg-core" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.414981 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6969855-7e1c-46bd-8fc4-b168485224aa" containerName="sg-core" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.415237 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6969855-7e1c-46bd-8fc4-b168485224aa" containerName="ceilometer-notification-agent" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.415252 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6969855-7e1c-46bd-8fc4-b168485224aa" containerName="ceilometer-central-agent" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.415281 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6969855-7e1c-46bd-8fc4-b168485224aa" containerName="sg-core" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.415297 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6969855-7e1c-46bd-8fc4-b168485224aa" containerName="proxy-httpd" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.416507 4632 scope.go:117] "RemoveContainer" containerID="85ac04cd95f63cb4ec3ada54760d7a6bb7108a51da7a5605ca1af598c2d496c3" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.417018 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.419925 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.420090 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.425449 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.519832 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-th99c\" (UniqueName: \"kubernetes.io/projected/c21cb575-7b86-455a-8999-caff25488217-kube-api-access-th99c\") pod \"ceilometer-0\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " pod="openstack/ceilometer-0" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.519915 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c21cb575-7b86-455a-8999-caff25488217-config-data\") pod \"ceilometer-0\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " pod="openstack/ceilometer-0" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.519960 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c21cb575-7b86-455a-8999-caff25488217-run-httpd\") pod \"ceilometer-0\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " pod="openstack/ceilometer-0" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.520001 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c21cb575-7b86-455a-8999-caff25488217-sg-core-conf-yaml\") pod 
\"ceilometer-0\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " pod="openstack/ceilometer-0" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.520074 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c21cb575-7b86-455a-8999-caff25488217-log-httpd\") pod \"ceilometer-0\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " pod="openstack/ceilometer-0" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.520176 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c21cb575-7b86-455a-8999-caff25488217-scripts\") pod \"ceilometer-0\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " pod="openstack/ceilometer-0" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.520220 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c21cb575-7b86-455a-8999-caff25488217-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " pod="openstack/ceilometer-0" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.621958 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c21cb575-7b86-455a-8999-caff25488217-run-httpd\") pod \"ceilometer-0\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " pod="openstack/ceilometer-0" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.622032 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c21cb575-7b86-455a-8999-caff25488217-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " pod="openstack/ceilometer-0" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.622129 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c21cb575-7b86-455a-8999-caff25488217-log-httpd\") pod \"ceilometer-0\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " pod="openstack/ceilometer-0" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.622241 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c21cb575-7b86-455a-8999-caff25488217-scripts\") pod \"ceilometer-0\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " pod="openstack/ceilometer-0" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.622270 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c21cb575-7b86-455a-8999-caff25488217-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " pod="openstack/ceilometer-0" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.622438 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-th99c\" (UniqueName: \"kubernetes.io/projected/c21cb575-7b86-455a-8999-caff25488217-kube-api-access-th99c\") pod \"ceilometer-0\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " pod="openstack/ceilometer-0" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.622508 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/c21cb575-7b86-455a-8999-caff25488217-config-data\") pod \"ceilometer-0\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " pod="openstack/ceilometer-0" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.622575 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c21cb575-7b86-455a-8999-caff25488217-run-httpd\") pod \"ceilometer-0\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " pod="openstack/ceilometer-0" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.623232 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c21cb575-7b86-455a-8999-caff25488217-log-httpd\") pod \"ceilometer-0\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " pod="openstack/ceilometer-0" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.629550 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c21cb575-7b86-455a-8999-caff25488217-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " pod="openstack/ceilometer-0" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.629858 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c21cb575-7b86-455a-8999-caff25488217-scripts\") pod \"ceilometer-0\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " pod="openstack/ceilometer-0" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.629900 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c21cb575-7b86-455a-8999-caff25488217-config-data\") pod \"ceilometer-0\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " pod="openstack/ceilometer-0" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.640687 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c21cb575-7b86-455a-8999-caff25488217-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " pod="openstack/ceilometer-0" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.642983 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-th99c\" (UniqueName: \"kubernetes.io/projected/c21cb575-7b86-455a-8999-caff25488217-kube-api-access-th99c\") pod \"ceilometer-0\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " pod="openstack/ceilometer-0" Dec 01 06:59:15 crc kubenswrapper[4632]: I1201 06:59:15.738099 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 06:59:16 crc kubenswrapper[4632]: I1201 06:59:16.166547 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 06:59:16 crc kubenswrapper[4632]: I1201 06:59:16.193802 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 01 06:59:16 crc kubenswrapper[4632]: I1201 06:59:16.346965 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3524161d-d124-4b04-9b25-73e2e3188c7f","Type":"ContainerStarted","Data":"8b6cb3ec92ed6c69835f43b5e1c8f1873e3d1450d50fe936d2fa8d1d122417d6"} Dec 01 06:59:16 crc kubenswrapper[4632]: I1201 06:59:16.349534 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c21cb575-7b86-455a-8999-caff25488217","Type":"ContainerStarted","Data":"a4909d1c1cdeca1f820d85556ef30d31c9b95b3930faea6e05bf9e8d75b6973e"} Dec 01 06:59:16 crc kubenswrapper[4632]: I1201 06:59:16.786422 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6969855-7e1c-46bd-8fc4-b168485224aa" path="/var/lib/kubelet/pods/e6969855-7e1c-46bd-8fc4-b168485224aa/volumes" Dec 01 06:59:17 crc kubenswrapper[4632]: I1201 06:59:17.281752 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 06:59:17 crc kubenswrapper[4632]: I1201 06:59:17.360703 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c21cb575-7b86-455a-8999-caff25488217","Type":"ContainerStarted","Data":"0f49b4c7a006aa5c3aa8ed823897a22b4b943474570692520da131ef2bd2e438"} Dec 01 06:59:17 crc kubenswrapper[4632]: I1201 06:59:17.360843 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="4cdd84ee-b1d9-4556-9c3d-f834d31430d6" containerName="nova-cell0-conductor-conductor" containerID="cri-o://9271e9d907f381bba846247cff4189562b220144b55716aaf5a64ad30b48faaf" gracePeriod=30 Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.069838 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.193630 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qgf9h\" (UniqueName: \"kubernetes.io/projected/4cdd84ee-b1d9-4556-9c3d-f834d31430d6-kube-api-access-qgf9h\") pod \"4cdd84ee-b1d9-4556-9c3d-f834d31430d6\" (UID: \"4cdd84ee-b1d9-4556-9c3d-f834d31430d6\") " Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.193691 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cdd84ee-b1d9-4556-9c3d-f834d31430d6-combined-ca-bundle\") pod \"4cdd84ee-b1d9-4556-9c3d-f834d31430d6\" (UID: \"4cdd84ee-b1d9-4556-9c3d-f834d31430d6\") " Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.194608 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cdd84ee-b1d9-4556-9c3d-f834d31430d6-config-data\") pod \"4cdd84ee-b1d9-4556-9c3d-f834d31430d6\" (UID: \"4cdd84ee-b1d9-4556-9c3d-f834d31430d6\") " Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.199972 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4cdd84ee-b1d9-4556-9c3d-f834d31430d6-kube-api-access-qgf9h" (OuterVolumeSpecName: "kube-api-access-qgf9h") pod "4cdd84ee-b1d9-4556-9c3d-f834d31430d6" (UID: "4cdd84ee-b1d9-4556-9c3d-f834d31430d6"). InnerVolumeSpecName "kube-api-access-qgf9h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.215683 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cdd84ee-b1d9-4556-9c3d-f834d31430d6-config-data" (OuterVolumeSpecName: "config-data") pod "4cdd84ee-b1d9-4556-9c3d-f834d31430d6" (UID: "4cdd84ee-b1d9-4556-9c3d-f834d31430d6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.222052 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4cdd84ee-b1d9-4556-9c3d-f834d31430d6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4cdd84ee-b1d9-4556-9c3d-f834d31430d6" (UID: "4cdd84ee-b1d9-4556-9c3d-f834d31430d6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.297666 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qgf9h\" (UniqueName: \"kubernetes.io/projected/4cdd84ee-b1d9-4556-9c3d-f834d31430d6-kube-api-access-qgf9h\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.297706 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4cdd84ee-b1d9-4556-9c3d-f834d31430d6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.297720 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4cdd84ee-b1d9-4556-9c3d-f834d31430d6-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.378536 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c21cb575-7b86-455a-8999-caff25488217","Type":"ContainerStarted","Data":"a99200df0555fc4d493d8094f490663f223e8bfedc2bebb13f227af97246d9ef"} Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.380565 4632 generic.go:334] "Generic (PLEG): container finished" podID="4cdd84ee-b1d9-4556-9c3d-f834d31430d6" containerID="9271e9d907f381bba846247cff4189562b220144b55716aaf5a64ad30b48faaf" exitCode=0 Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.380605 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"4cdd84ee-b1d9-4556-9c3d-f834d31430d6","Type":"ContainerDied","Data":"9271e9d907f381bba846247cff4189562b220144b55716aaf5a64ad30b48faaf"} Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.380627 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"4cdd84ee-b1d9-4556-9c3d-f834d31430d6","Type":"ContainerDied","Data":"0b5b9ce5742c61000b9dc91dfb6283904a03d782a3ffa254d706153cc5012c4d"} Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.380650 4632 scope.go:117] "RemoveContainer" containerID="9271e9d907f381bba846247cff4189562b220144b55716aaf5a64ad30b48faaf" Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.380829 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.414326 4632 scope.go:117] "RemoveContainer" containerID="9271e9d907f381bba846247cff4189562b220144b55716aaf5a64ad30b48faaf" Dec 01 06:59:18 crc kubenswrapper[4632]: E1201 06:59:18.415524 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9271e9d907f381bba846247cff4189562b220144b55716aaf5a64ad30b48faaf\": container with ID starting with 9271e9d907f381bba846247cff4189562b220144b55716aaf5a64ad30b48faaf not found: ID does not exist" containerID="9271e9d907f381bba846247cff4189562b220144b55716aaf5a64ad30b48faaf" Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.415573 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9271e9d907f381bba846247cff4189562b220144b55716aaf5a64ad30b48faaf"} err="failed to get container status \"9271e9d907f381bba846247cff4189562b220144b55716aaf5a64ad30b48faaf\": rpc error: code = NotFound desc = could not find container \"9271e9d907f381bba846247cff4189562b220144b55716aaf5a64ad30b48faaf\": container with ID starting with 9271e9d907f381bba846247cff4189562b220144b55716aaf5a64ad30b48faaf not found: ID does not exist" Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.430781 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.452135 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.467056 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 01 06:59:18 crc kubenswrapper[4632]: E1201 06:59:18.467831 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cdd84ee-b1d9-4556-9c3d-f834d31430d6" containerName="nova-cell0-conductor-conductor" Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.467858 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cdd84ee-b1d9-4556-9c3d-f834d31430d6" containerName="nova-cell0-conductor-conductor" Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.468118 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="4cdd84ee-b1d9-4556-9c3d-f834d31430d6" containerName="nova-cell0-conductor-conductor" Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.469245 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.475253 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.475377 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-25mjt" Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.481794 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.607165 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rv29b\" (UniqueName: \"kubernetes.io/projected/a3304516-deb9-4715-a501-c0b1dbb89945-kube-api-access-rv29b\") pod \"nova-cell0-conductor-0\" (UID: \"a3304516-deb9-4715-a501-c0b1dbb89945\") " pod="openstack/nova-cell0-conductor-0" Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.607264 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3304516-deb9-4715-a501-c0b1dbb89945-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"a3304516-deb9-4715-a501-c0b1dbb89945\") " pod="openstack/nova-cell0-conductor-0" Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.607493 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3304516-deb9-4715-a501-c0b1dbb89945-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"a3304516-deb9-4715-a501-c0b1dbb89945\") " pod="openstack/nova-cell0-conductor-0" Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.708302 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3304516-deb9-4715-a501-c0b1dbb89945-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"a3304516-deb9-4715-a501-c0b1dbb89945\") " pod="openstack/nova-cell0-conductor-0" Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.708406 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rv29b\" (UniqueName: \"kubernetes.io/projected/a3304516-deb9-4715-a501-c0b1dbb89945-kube-api-access-rv29b\") pod \"nova-cell0-conductor-0\" (UID: \"a3304516-deb9-4715-a501-c0b1dbb89945\") " pod="openstack/nova-cell0-conductor-0" Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.708447 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3304516-deb9-4715-a501-c0b1dbb89945-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"a3304516-deb9-4715-a501-c0b1dbb89945\") " pod="openstack/nova-cell0-conductor-0" Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.712788 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3304516-deb9-4715-a501-c0b1dbb89945-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"a3304516-deb9-4715-a501-c0b1dbb89945\") " pod="openstack/nova-cell0-conductor-0" Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.715926 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3304516-deb9-4715-a501-c0b1dbb89945-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" 
(UID: \"a3304516-deb9-4715-a501-c0b1dbb89945\") " pod="openstack/nova-cell0-conductor-0" Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.724244 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rv29b\" (UniqueName: \"kubernetes.io/projected/a3304516-deb9-4715-a501-c0b1dbb89945-kube-api-access-rv29b\") pod \"nova-cell0-conductor-0\" (UID: \"a3304516-deb9-4715-a501-c0b1dbb89945\") " pod="openstack/nova-cell0-conductor-0" Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.758169 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4cdd84ee-b1d9-4556-9c3d-f834d31430d6" path="/var/lib/kubelet/pods/4cdd84ee-b1d9-4556-9c3d-f834d31430d6/volumes" Dec 01 06:59:18 crc kubenswrapper[4632]: I1201 06:59:18.786344 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 01 06:59:19 crc kubenswrapper[4632]: I1201 06:59:19.202365 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 01 06:59:19 crc kubenswrapper[4632]: I1201 06:59:19.429884 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"a3304516-deb9-4715-a501-c0b1dbb89945","Type":"ContainerStarted","Data":"e8be83ed1409c2da8c16d7be98c6cb94c7476c73f3332d63d194914ef545ca02"} Dec 01 06:59:19 crc kubenswrapper[4632]: I1201 06:59:19.430279 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"a3304516-deb9-4715-a501-c0b1dbb89945","Type":"ContainerStarted","Data":"a11fab2f74fe4a4219773e893fef4375671934196052d03a7060f64c1c38b29f"} Dec 01 06:59:19 crc kubenswrapper[4632]: I1201 06:59:19.435415 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c21cb575-7b86-455a-8999-caff25488217","Type":"ContainerStarted","Data":"f29948b23b942b17bd546f54fccce423871de610e1d66b6a1ab90b21536cf61e"} Dec 01 06:59:19 crc kubenswrapper[4632]: I1201 06:59:19.449021 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=1.449005398 podStartE2EDuration="1.449005398s" podCreationTimestamp="2025-12-01 06:59:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:59:19.448994557 +0000 UTC m=+969.014007530" watchObservedRunningTime="2025-12-01 06:59:19.449005398 +0000 UTC m=+969.014018371" Dec 01 06:59:20 crc kubenswrapper[4632]: I1201 06:59:20.445633 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 01 06:59:20 crc kubenswrapper[4632]: I1201 06:59:20.613177 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 01 06:59:20 crc kubenswrapper[4632]: I1201 06:59:20.613239 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 01 06:59:20 crc kubenswrapper[4632]: I1201 06:59:20.641132 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 01 06:59:20 crc kubenswrapper[4632]: I1201 06:59:20.645793 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 01 06:59:21 crc kubenswrapper[4632]: I1201 06:59:21.457618 4632 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack/ceilometer-0" event={"ID":"c21cb575-7b86-455a-8999-caff25488217","Type":"ContainerStarted","Data":"83e4c8c3ea9beeb56e350d29fe62112b42ca9c17082739689c7b1f8275a1883f"} Dec 01 06:59:21 crc kubenswrapper[4632]: I1201 06:59:21.458242 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c21cb575-7b86-455a-8999-caff25488217" containerName="ceilometer-central-agent" containerID="cri-o://0f49b4c7a006aa5c3aa8ed823897a22b4b943474570692520da131ef2bd2e438" gracePeriod=30 Dec 01 06:59:21 crc kubenswrapper[4632]: I1201 06:59:21.458280 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c21cb575-7b86-455a-8999-caff25488217" containerName="sg-core" containerID="cri-o://f29948b23b942b17bd546f54fccce423871de610e1d66b6a1ab90b21536cf61e" gracePeriod=30 Dec 01 06:59:21 crc kubenswrapper[4632]: I1201 06:59:21.458367 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c21cb575-7b86-455a-8999-caff25488217" containerName="proxy-httpd" containerID="cri-o://83e4c8c3ea9beeb56e350d29fe62112b42ca9c17082739689c7b1f8275a1883f" gracePeriod=30 Dec 01 06:59:21 crc kubenswrapper[4632]: I1201 06:59:21.458266 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 01 06:59:21 crc kubenswrapper[4632]: I1201 06:59:21.458506 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 01 06:59:21 crc kubenswrapper[4632]: I1201 06:59:21.458468 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c21cb575-7b86-455a-8999-caff25488217" containerName="ceilometer-notification-agent" containerID="cri-o://a99200df0555fc4d493d8094f490663f223e8bfedc2bebb13f227af97246d9ef" gracePeriod=30 Dec 01 06:59:21 crc kubenswrapper[4632]: I1201 06:59:21.492468 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.20302384 podStartE2EDuration="6.492440093s" podCreationTimestamp="2025-12-01 06:59:15 +0000 UTC" firstStartedPulling="2025-12-01 06:59:16.170979059 +0000 UTC m=+965.735992033" lastFinishedPulling="2025-12-01 06:59:20.460395313 +0000 UTC m=+970.025408286" observedRunningTime="2025-12-01 06:59:21.485938608 +0000 UTC m=+971.050951582" watchObservedRunningTime="2025-12-01 06:59:21.492440093 +0000 UTC m=+971.057453066" Dec 01 06:59:22 crc kubenswrapper[4632]: I1201 06:59:22.470673 4632 generic.go:334] "Generic (PLEG): container finished" podID="c21cb575-7b86-455a-8999-caff25488217" containerID="83e4c8c3ea9beeb56e350d29fe62112b42ca9c17082739689c7b1f8275a1883f" exitCode=0 Dec 01 06:59:22 crc kubenswrapper[4632]: I1201 06:59:22.470994 4632 generic.go:334] "Generic (PLEG): container finished" podID="c21cb575-7b86-455a-8999-caff25488217" containerID="f29948b23b942b17bd546f54fccce423871de610e1d66b6a1ab90b21536cf61e" exitCode=2 Dec 01 06:59:22 crc kubenswrapper[4632]: I1201 06:59:22.471004 4632 generic.go:334] "Generic (PLEG): container finished" podID="c21cb575-7b86-455a-8999-caff25488217" containerID="a99200df0555fc4d493d8094f490663f223e8bfedc2bebb13f227af97246d9ef" exitCode=0 Dec 01 06:59:22 crc kubenswrapper[4632]: I1201 06:59:22.470721 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"c21cb575-7b86-455a-8999-caff25488217","Type":"ContainerDied","Data":"83e4c8c3ea9beeb56e350d29fe62112b42ca9c17082739689c7b1f8275a1883f"} Dec 01 06:59:22 crc kubenswrapper[4632]: I1201 06:59:22.471845 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c21cb575-7b86-455a-8999-caff25488217","Type":"ContainerDied","Data":"f29948b23b942b17bd546f54fccce423871de610e1d66b6a1ab90b21536cf61e"} Dec 01 06:59:22 crc kubenswrapper[4632]: I1201 06:59:22.471860 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c21cb575-7b86-455a-8999-caff25488217","Type":"ContainerDied","Data":"a99200df0555fc4d493d8094f490663f223e8bfedc2bebb13f227af97246d9ef"} Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.195961 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.199806 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.498388 4632 generic.go:334] "Generic (PLEG): container finished" podID="c21cb575-7b86-455a-8999-caff25488217" containerID="0f49b4c7a006aa5c3aa8ed823897a22b4b943474570692520da131ef2bd2e438" exitCode=0 Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.499541 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c21cb575-7b86-455a-8999-caff25488217","Type":"ContainerDied","Data":"0f49b4c7a006aa5c3aa8ed823897a22b4b943474570692520da131ef2bd2e438"} Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.581019 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.632561 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c21cb575-7b86-455a-8999-caff25488217-log-httpd\") pod \"c21cb575-7b86-455a-8999-caff25488217\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.632730 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c21cb575-7b86-455a-8999-caff25488217-scripts\") pod \"c21cb575-7b86-455a-8999-caff25488217\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.632809 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c21cb575-7b86-455a-8999-caff25488217-run-httpd\") pod \"c21cb575-7b86-455a-8999-caff25488217\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.632838 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-th99c\" (UniqueName: \"kubernetes.io/projected/c21cb575-7b86-455a-8999-caff25488217-kube-api-access-th99c\") pod \"c21cb575-7b86-455a-8999-caff25488217\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.632863 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c21cb575-7b86-455a-8999-caff25488217-config-data\") pod \"c21cb575-7b86-455a-8999-caff25488217\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.632885 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c21cb575-7b86-455a-8999-caff25488217-combined-ca-bundle\") pod \"c21cb575-7b86-455a-8999-caff25488217\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.632921 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c21cb575-7b86-455a-8999-caff25488217-sg-core-conf-yaml\") pod \"c21cb575-7b86-455a-8999-caff25488217\" (UID: \"c21cb575-7b86-455a-8999-caff25488217\") " Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.633067 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c21cb575-7b86-455a-8999-caff25488217-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c21cb575-7b86-455a-8999-caff25488217" (UID: "c21cb575-7b86-455a-8999-caff25488217"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.633884 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c21cb575-7b86-455a-8999-caff25488217-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c21cb575-7b86-455a-8999-caff25488217" (UID: "c21cb575-7b86-455a-8999-caff25488217"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.633944 4632 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c21cb575-7b86-455a-8999-caff25488217-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.639656 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c21cb575-7b86-455a-8999-caff25488217-kube-api-access-th99c" (OuterVolumeSpecName: "kube-api-access-th99c") pod "c21cb575-7b86-455a-8999-caff25488217" (UID: "c21cb575-7b86-455a-8999-caff25488217"). InnerVolumeSpecName "kube-api-access-th99c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.639969 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c21cb575-7b86-455a-8999-caff25488217-scripts" (OuterVolumeSpecName: "scripts") pod "c21cb575-7b86-455a-8999-caff25488217" (UID: "c21cb575-7b86-455a-8999-caff25488217"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.657370 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c21cb575-7b86-455a-8999-caff25488217-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c21cb575-7b86-455a-8999-caff25488217" (UID: "c21cb575-7b86-455a-8999-caff25488217"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.692775 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c21cb575-7b86-455a-8999-caff25488217-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c21cb575-7b86-455a-8999-caff25488217" (UID: "c21cb575-7b86-455a-8999-caff25488217"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.692913 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.692959 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.704919 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c21cb575-7b86-455a-8999-caff25488217-config-data" (OuterVolumeSpecName: "config-data") pod "c21cb575-7b86-455a-8999-caff25488217" (UID: "c21cb575-7b86-455a-8999-caff25488217"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.725586 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.728470 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.736708 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c21cb575-7b86-455a-8999-caff25488217-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.736749 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c21cb575-7b86-455a-8999-caff25488217-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.736764 4632 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c21cb575-7b86-455a-8999-caff25488217-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.736775 4632 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c21cb575-7b86-455a-8999-caff25488217-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.736785 4632 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c21cb575-7b86-455a-8999-caff25488217-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:23 crc kubenswrapper[4632]: I1201 06:59:23.736795 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-th99c\" (UniqueName: \"kubernetes.io/projected/c21cb575-7b86-455a-8999-caff25488217-kube-api-access-th99c\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.513461 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.516854 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c21cb575-7b86-455a-8999-caff25488217","Type":"ContainerDied","Data":"a4909d1c1cdeca1f820d85556ef30d31c9b95b3930faea6e05bf9e8d75b6973e"} Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.516985 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.517045 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.517068 4632 scope.go:117] "RemoveContainer" containerID="83e4c8c3ea9beeb56e350d29fe62112b42ca9c17082739689c7b1f8275a1883f" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.547254 4632 scope.go:117] "RemoveContainer" containerID="f29948b23b942b17bd546f54fccce423871de610e1d66b6a1ab90b21536cf61e" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.557471 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.576394 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.584994 4632 scope.go:117] "RemoveContainer" containerID="a99200df0555fc4d493d8094f490663f223e8bfedc2bebb13f227af97246d9ef" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.611173 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 06:59:24 crc kubenswrapper[4632]: E1201 06:59:24.611652 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c21cb575-7b86-455a-8999-caff25488217" containerName="ceilometer-notification-agent" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.611671 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="c21cb575-7b86-455a-8999-caff25488217" containerName="ceilometer-notification-agent" Dec 01 06:59:24 crc kubenswrapper[4632]: E1201 06:59:24.611699 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c21cb575-7b86-455a-8999-caff25488217" containerName="sg-core" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.611705 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="c21cb575-7b86-455a-8999-caff25488217" containerName="sg-core" Dec 01 06:59:24 crc kubenswrapper[4632]: E1201 06:59:24.611730 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c21cb575-7b86-455a-8999-caff25488217" containerName="ceilometer-central-agent" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.611736 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="c21cb575-7b86-455a-8999-caff25488217" containerName="ceilometer-central-agent" Dec 01 06:59:24 crc kubenswrapper[4632]: E1201 06:59:24.611748 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c21cb575-7b86-455a-8999-caff25488217" containerName="proxy-httpd" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.611753 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="c21cb575-7b86-455a-8999-caff25488217" containerName="proxy-httpd" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.611941 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="c21cb575-7b86-455a-8999-caff25488217" containerName="sg-core" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.611955 
4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="c21cb575-7b86-455a-8999-caff25488217" containerName="proxy-httpd" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.611969 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="c21cb575-7b86-455a-8999-caff25488217" containerName="ceilometer-central-agent" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.611979 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="c21cb575-7b86-455a-8999-caff25488217" containerName="ceilometer-notification-agent" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.613592 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.615131 4632 scope.go:117] "RemoveContainer" containerID="0f49b4c7a006aa5c3aa8ed823897a22b4b943474570692520da131ef2bd2e438" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.625637 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.625817 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.650176 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.650839 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/985b6caa-b51e-4da7-ba3b-1dad22e138ff-scripts\") pod \"ceilometer-0\" (UID: \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " pod="openstack/ceilometer-0" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.650912 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/985b6caa-b51e-4da7-ba3b-1dad22e138ff-config-data\") pod \"ceilometer-0\" (UID: \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " pod="openstack/ceilometer-0" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.650972 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/985b6caa-b51e-4da7-ba3b-1dad22e138ff-run-httpd\") pod \"ceilometer-0\" (UID: \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " pod="openstack/ceilometer-0" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.651056 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/985b6caa-b51e-4da7-ba3b-1dad22e138ff-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " pod="openstack/ceilometer-0" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.651100 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/985b6caa-b51e-4da7-ba3b-1dad22e138ff-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " pod="openstack/ceilometer-0" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.651171 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdmlx\" (UniqueName: \"kubernetes.io/projected/985b6caa-b51e-4da7-ba3b-1dad22e138ff-kube-api-access-pdmlx\") pod \"ceilometer-0\" 
(UID: \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " pod="openstack/ceilometer-0" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.651224 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/985b6caa-b51e-4da7-ba3b-1dad22e138ff-log-httpd\") pod \"ceilometer-0\" (UID: \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " pod="openstack/ceilometer-0" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.753038 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/985b6caa-b51e-4da7-ba3b-1dad22e138ff-run-httpd\") pod \"ceilometer-0\" (UID: \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " pod="openstack/ceilometer-0" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.753126 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/985b6caa-b51e-4da7-ba3b-1dad22e138ff-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " pod="openstack/ceilometer-0" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.753176 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/985b6caa-b51e-4da7-ba3b-1dad22e138ff-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " pod="openstack/ceilometer-0" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.753217 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdmlx\" (UniqueName: \"kubernetes.io/projected/985b6caa-b51e-4da7-ba3b-1dad22e138ff-kube-api-access-pdmlx\") pod \"ceilometer-0\" (UID: \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " pod="openstack/ceilometer-0" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.753244 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/985b6caa-b51e-4da7-ba3b-1dad22e138ff-log-httpd\") pod \"ceilometer-0\" (UID: \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " pod="openstack/ceilometer-0" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.753276 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/985b6caa-b51e-4da7-ba3b-1dad22e138ff-scripts\") pod \"ceilometer-0\" (UID: \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " pod="openstack/ceilometer-0" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.753340 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/985b6caa-b51e-4da7-ba3b-1dad22e138ff-config-data\") pod \"ceilometer-0\" (UID: \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " pod="openstack/ceilometer-0" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.754012 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/985b6caa-b51e-4da7-ba3b-1dad22e138ff-log-httpd\") pod \"ceilometer-0\" (UID: \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " pod="openstack/ceilometer-0" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.754184 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/985b6caa-b51e-4da7-ba3b-1dad22e138ff-run-httpd\") pod \"ceilometer-0\" (UID: 
\"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " pod="openstack/ceilometer-0" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.759164 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/985b6caa-b51e-4da7-ba3b-1dad22e138ff-scripts\") pod \"ceilometer-0\" (UID: \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " pod="openstack/ceilometer-0" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.760778 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/985b6caa-b51e-4da7-ba3b-1dad22e138ff-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " pod="openstack/ceilometer-0" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.766187 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/985b6caa-b51e-4da7-ba3b-1dad22e138ff-config-data\") pod \"ceilometer-0\" (UID: \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " pod="openstack/ceilometer-0" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.769558 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c21cb575-7b86-455a-8999-caff25488217" path="/var/lib/kubelet/pods/c21cb575-7b86-455a-8999-caff25488217/volumes" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.771739 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdmlx\" (UniqueName: \"kubernetes.io/projected/985b6caa-b51e-4da7-ba3b-1dad22e138ff-kube-api-access-pdmlx\") pod \"ceilometer-0\" (UID: \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " pod="openstack/ceilometer-0" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.774563 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/985b6caa-b51e-4da7-ba3b-1dad22e138ff-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " pod="openstack/ceilometer-0" Dec 01 06:59:24 crc kubenswrapper[4632]: I1201 06:59:24.936592 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 06:59:25 crc kubenswrapper[4632]: I1201 06:59:25.490860 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 06:59:25 crc kubenswrapper[4632]: I1201 06:59:25.532312 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"985b6caa-b51e-4da7-ba3b-1dad22e138ff","Type":"ContainerStarted","Data":"272253bb148d4f4b0b8359de5fd66387e1bcf9974bd04eef5fdbc30de7f81461"} Dec 01 06:59:26 crc kubenswrapper[4632]: I1201 06:59:26.188998 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 01 06:59:26 crc kubenswrapper[4632]: I1201 06:59:26.492153 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 01 06:59:26 crc kubenswrapper[4632]: I1201 06:59:26.542515 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"985b6caa-b51e-4da7-ba3b-1dad22e138ff","Type":"ContainerStarted","Data":"6e57477dccce9032ae046d1909ea368f4ba585d0da1baa37f57b779360a9d7b2"} Dec 01 06:59:27 crc kubenswrapper[4632]: I1201 06:59:27.555494 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"985b6caa-b51e-4da7-ba3b-1dad22e138ff","Type":"ContainerStarted","Data":"d4e07e1ddd251612cef48cbac2fcced5ace9b1cb50e1fcc3a423bf7a92fa3e97"} Dec 01 06:59:28 crc kubenswrapper[4632]: I1201 06:59:28.568679 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"985b6caa-b51e-4da7-ba3b-1dad22e138ff","Type":"ContainerStarted","Data":"a1c159bfc199a4fd080b2da3c03826f4ff70817c1106311bbbd0a2b582711d33"} Dec 01 06:59:28 crc kubenswrapper[4632]: I1201 06:59:28.814325 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.312574 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-hdlnh"] Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.314110 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-hdlnh" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.315803 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.316670 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.322103 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-hdlnh"] Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.450659 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.452181 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.454209 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.471057 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.487860 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpvbd\" (UniqueName: \"kubernetes.io/projected/80cde8b2-5981-4e87-a781-db78ead1e0e1-kube-api-access-kpvbd\") pod \"nova-cell0-cell-mapping-hdlnh\" (UID: \"80cde8b2-5981-4e87-a781-db78ead1e0e1\") " pod="openstack/nova-cell0-cell-mapping-hdlnh" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.488025 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80cde8b2-5981-4e87-a781-db78ead1e0e1-scripts\") pod \"nova-cell0-cell-mapping-hdlnh\" (UID: \"80cde8b2-5981-4e87-a781-db78ead1e0e1\") " pod="openstack/nova-cell0-cell-mapping-hdlnh" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.488164 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80cde8b2-5981-4e87-a781-db78ead1e0e1-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-hdlnh\" (UID: \"80cde8b2-5981-4e87-a781-db78ead1e0e1\") " pod="openstack/nova-cell0-cell-mapping-hdlnh" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.488425 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80cde8b2-5981-4e87-a781-db78ead1e0e1-config-data\") pod \"nova-cell0-cell-mapping-hdlnh\" (UID: \"80cde8b2-5981-4e87-a781-db78ead1e0e1\") " pod="openstack/nova-cell0-cell-mapping-hdlnh" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.525610 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.527250 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.529652 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.549982 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.551280 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.563747 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.586971 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"985b6caa-b51e-4da7-ba3b-1dad22e138ff","Type":"ContainerStarted","Data":"22e06cfd74d89f98114adb6a39a15b0289ce14df09888e523cfd2789cf872d9a"} Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.588099 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.589689 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpvbd\" (UniqueName: \"kubernetes.io/projected/80cde8b2-5981-4e87-a781-db78ead1e0e1-kube-api-access-kpvbd\") pod \"nova-cell0-cell-mapping-hdlnh\" (UID: \"80cde8b2-5981-4e87-a781-db78ead1e0e1\") " pod="openstack/nova-cell0-cell-mapping-hdlnh" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.589727 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/152bc7eb-abfb-41dc-9f15-afcae2e96770-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"152bc7eb-abfb-41dc-9f15-afcae2e96770\") " pod="openstack/nova-api-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.589758 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/152bc7eb-abfb-41dc-9f15-afcae2e96770-config-data\") pod \"nova-api-0\" (UID: \"152bc7eb-abfb-41dc-9f15-afcae2e96770\") " pod="openstack/nova-api-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.589805 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80cde8b2-5981-4e87-a781-db78ead1e0e1-scripts\") pod \"nova-cell0-cell-mapping-hdlnh\" (UID: \"80cde8b2-5981-4e87-a781-db78ead1e0e1\") " pod="openstack/nova-cell0-cell-mapping-hdlnh" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.589831 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/152bc7eb-abfb-41dc-9f15-afcae2e96770-logs\") pod \"nova-api-0\" (UID: \"152bc7eb-abfb-41dc-9f15-afcae2e96770\") " pod="openstack/nova-api-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.589850 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80cde8b2-5981-4e87-a781-db78ead1e0e1-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-hdlnh\" (UID: \"80cde8b2-5981-4e87-a781-db78ead1e0e1\") " pod="openstack/nova-cell0-cell-mapping-hdlnh" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.589901 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80cde8b2-5981-4e87-a781-db78ead1e0e1-config-data\") pod \"nova-cell0-cell-mapping-hdlnh\" (UID: \"80cde8b2-5981-4e87-a781-db78ead1e0e1\") " pod="openstack/nova-cell0-cell-mapping-hdlnh" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.589921 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9t75m\" (UniqueName: 
\"kubernetes.io/projected/152bc7eb-abfb-41dc-9f15-afcae2e96770-kube-api-access-9t75m\") pod \"nova-api-0\" (UID: \"152bc7eb-abfb-41dc-9f15-afcae2e96770\") " pod="openstack/nova-api-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.597807 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80cde8b2-5981-4e87-a781-db78ead1e0e1-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-hdlnh\" (UID: \"80cde8b2-5981-4e87-a781-db78ead1e0e1\") " pod="openstack/nova-cell0-cell-mapping-hdlnh" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.603729 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80cde8b2-5981-4e87-a781-db78ead1e0e1-scripts\") pod \"nova-cell0-cell-mapping-hdlnh\" (UID: \"80cde8b2-5981-4e87-a781-db78ead1e0e1\") " pod="openstack/nova-cell0-cell-mapping-hdlnh" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.603785 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.612703 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80cde8b2-5981-4e87-a781-db78ead1e0e1-config-data\") pod \"nova-cell0-cell-mapping-hdlnh\" (UID: \"80cde8b2-5981-4e87-a781-db78ead1e0e1\") " pod="openstack/nova-cell0-cell-mapping-hdlnh" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.618730 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.625274 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpvbd\" (UniqueName: \"kubernetes.io/projected/80cde8b2-5981-4e87-a781-db78ead1e0e1-kube-api-access-kpvbd\") pod \"nova-cell0-cell-mapping-hdlnh\" (UID: \"80cde8b2-5981-4e87-a781-db78ead1e0e1\") " pod="openstack/nova-cell0-cell-mapping-hdlnh" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.633722 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-hdlnh" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.695610 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bbc60b8c-82bf-4773-95e1-e259a9aba512-logs\") pod \"nova-metadata-0\" (UID: \"bbc60b8c-82bf-4773-95e1-e259a9aba512\") " pod="openstack/nova-metadata-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.695674 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgv7p\" (UniqueName: \"kubernetes.io/projected/7e6b455e-f763-4e20-8bc5-a5a3e7357970-kube-api-access-vgv7p\") pod \"nova-scheduler-0\" (UID: \"7e6b455e-f763-4e20-8bc5-a5a3e7357970\") " pod="openstack/nova-scheduler-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.695699 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/152bc7eb-abfb-41dc-9f15-afcae2e96770-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"152bc7eb-abfb-41dc-9f15-afcae2e96770\") " pod="openstack/nova-api-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.695720 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/152bc7eb-abfb-41dc-9f15-afcae2e96770-config-data\") pod \"nova-api-0\" (UID: \"152bc7eb-abfb-41dc-9f15-afcae2e96770\") " pod="openstack/nova-api-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.695775 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e6b455e-f763-4e20-8bc5-a5a3e7357970-config-data\") pod \"nova-scheduler-0\" (UID: \"7e6b455e-f763-4e20-8bc5-a5a3e7357970\") " pod="openstack/nova-scheduler-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.695808 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbc60b8c-82bf-4773-95e1-e259a9aba512-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"bbc60b8c-82bf-4773-95e1-e259a9aba512\") " pod="openstack/nova-metadata-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.695841 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/152bc7eb-abfb-41dc-9f15-afcae2e96770-logs\") pod \"nova-api-0\" (UID: \"152bc7eb-abfb-41dc-9f15-afcae2e96770\") " pod="openstack/nova-api-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.695863 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbc60b8c-82bf-4773-95e1-e259a9aba512-config-data\") pod \"nova-metadata-0\" (UID: \"bbc60b8c-82bf-4773-95e1-e259a9aba512\") " pod="openstack/nova-metadata-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.695928 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e6b455e-f763-4e20-8bc5-a5a3e7357970-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7e6b455e-f763-4e20-8bc5-a5a3e7357970\") " pod="openstack/nova-scheduler-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.695958 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-9t75m\" (UniqueName: \"kubernetes.io/projected/152bc7eb-abfb-41dc-9f15-afcae2e96770-kube-api-access-9t75m\") pod \"nova-api-0\" (UID: \"152bc7eb-abfb-41dc-9f15-afcae2e96770\") " pod="openstack/nova-api-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.696007 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sb6pq\" (UniqueName: \"kubernetes.io/projected/bbc60b8c-82bf-4773-95e1-e259a9aba512-kube-api-access-sb6pq\") pod \"nova-metadata-0\" (UID: \"bbc60b8c-82bf-4773-95e1-e259a9aba512\") " pod="openstack/nova-metadata-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.696611 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/152bc7eb-abfb-41dc-9f15-afcae2e96770-logs\") pod \"nova-api-0\" (UID: \"152bc7eb-abfb-41dc-9f15-afcae2e96770\") " pod="openstack/nova-api-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.705369 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/152bc7eb-abfb-41dc-9f15-afcae2e96770-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"152bc7eb-abfb-41dc-9f15-afcae2e96770\") " pod="openstack/nova-api-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.711776 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.715878 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.718816 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/152bc7eb-abfb-41dc-9f15-afcae2e96770-config-data\") pod \"nova-api-0\" (UID: \"152bc7eb-abfb-41dc-9f15-afcae2e96770\") " pod="openstack/nova-api-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.720269 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.257429592 podStartE2EDuration="5.7202501s" podCreationTimestamp="2025-12-01 06:59:24 +0000 UTC" firstStartedPulling="2025-12-01 06:59:25.511762839 +0000 UTC m=+975.076775812" lastFinishedPulling="2025-12-01 06:59:28.974583347 +0000 UTC m=+978.539596320" observedRunningTime="2025-12-01 06:59:29.64380634 +0000 UTC m=+979.208819314" watchObservedRunningTime="2025-12-01 06:59:29.7202501 +0000 UTC m=+979.285263074" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.722467 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9t75m\" (UniqueName: \"kubernetes.io/projected/152bc7eb-abfb-41dc-9f15-afcae2e96770-kube-api-access-9t75m\") pod \"nova-api-0\" (UID: \"152bc7eb-abfb-41dc-9f15-afcae2e96770\") " pod="openstack/nova-api-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.730245 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.730533 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.740380 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-88794ccbc-bcgk4"] Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.742000 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.747402 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-88794ccbc-bcgk4"] Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.767874 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.798270 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sb6pq\" (UniqueName: \"kubernetes.io/projected/bbc60b8c-82bf-4773-95e1-e259a9aba512-kube-api-access-sb6pq\") pod \"nova-metadata-0\" (UID: \"bbc60b8c-82bf-4773-95e1-e259a9aba512\") " pod="openstack/nova-metadata-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.798542 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bbc60b8c-82bf-4773-95e1-e259a9aba512-logs\") pod \"nova-metadata-0\" (UID: \"bbc60b8c-82bf-4773-95e1-e259a9aba512\") " pod="openstack/nova-metadata-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.798589 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgv7p\" (UniqueName: \"kubernetes.io/projected/7e6b455e-f763-4e20-8bc5-a5a3e7357970-kube-api-access-vgv7p\") pod \"nova-scheduler-0\" (UID: \"7e6b455e-f763-4e20-8bc5-a5a3e7357970\") " pod="openstack/nova-scheduler-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.798644 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e6b455e-f763-4e20-8bc5-a5a3e7357970-config-data\") pod \"nova-scheduler-0\" (UID: \"7e6b455e-f763-4e20-8bc5-a5a3e7357970\") " pod="openstack/nova-scheduler-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.798684 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbc60b8c-82bf-4773-95e1-e259a9aba512-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"bbc60b8c-82bf-4773-95e1-e259a9aba512\") " pod="openstack/nova-metadata-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.798718 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbc60b8c-82bf-4773-95e1-e259a9aba512-config-data\") pod \"nova-metadata-0\" (UID: \"bbc60b8c-82bf-4773-95e1-e259a9aba512\") " pod="openstack/nova-metadata-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.798790 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e6b455e-f763-4e20-8bc5-a5a3e7357970-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7e6b455e-f763-4e20-8bc5-a5a3e7357970\") " pod="openstack/nova-scheduler-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.801891 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bbc60b8c-82bf-4773-95e1-e259a9aba512-logs\") pod \"nova-metadata-0\" (UID: \"bbc60b8c-82bf-4773-95e1-e259a9aba512\") " pod="openstack/nova-metadata-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.804486 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e6b455e-f763-4e20-8bc5-a5a3e7357970-combined-ca-bundle\") pod 
\"nova-scheduler-0\" (UID: \"7e6b455e-f763-4e20-8bc5-a5a3e7357970\") " pod="openstack/nova-scheduler-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.805160 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbc60b8c-82bf-4773-95e1-e259a9aba512-config-data\") pod \"nova-metadata-0\" (UID: \"bbc60b8c-82bf-4773-95e1-e259a9aba512\") " pod="openstack/nova-metadata-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.809152 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbc60b8c-82bf-4773-95e1-e259a9aba512-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"bbc60b8c-82bf-4773-95e1-e259a9aba512\") " pod="openstack/nova-metadata-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.816939 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e6b455e-f763-4e20-8bc5-a5a3e7357970-config-data\") pod \"nova-scheduler-0\" (UID: \"7e6b455e-f763-4e20-8bc5-a5a3e7357970\") " pod="openstack/nova-scheduler-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.818092 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sb6pq\" (UniqueName: \"kubernetes.io/projected/bbc60b8c-82bf-4773-95e1-e259a9aba512-kube-api-access-sb6pq\") pod \"nova-metadata-0\" (UID: \"bbc60b8c-82bf-4773-95e1-e259a9aba512\") " pod="openstack/nova-metadata-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.828814 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgv7p\" (UniqueName: \"kubernetes.io/projected/7e6b455e-f763-4e20-8bc5-a5a3e7357970-kube-api-access-vgv7p\") pod \"nova-scheduler-0\" (UID: \"7e6b455e-f763-4e20-8bc5-a5a3e7357970\") " pod="openstack/nova-scheduler-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.855412 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.872175 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.900589 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rj79j\" (UniqueName: \"kubernetes.io/projected/16718254-ede5-4551-a80a-d5a80873630b-kube-api-access-rj79j\") pod \"dnsmasq-dns-88794ccbc-bcgk4\" (UID: \"16718254-ede5-4551-a80a-d5a80873630b\") " pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.900658 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-dns-swift-storage-0\") pod \"dnsmasq-dns-88794ccbc-bcgk4\" (UID: \"16718254-ede5-4551-a80a-d5a80873630b\") " pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.900684 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-ovsdbserver-nb\") pod \"dnsmasq-dns-88794ccbc-bcgk4\" (UID: \"16718254-ede5-4551-a80a-d5a80873630b\") " pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.900729 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-ovsdbserver-sb\") pod \"dnsmasq-dns-88794ccbc-bcgk4\" (UID: \"16718254-ede5-4551-a80a-d5a80873630b\") " pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.900770 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-config\") pod \"dnsmasq-dns-88794ccbc-bcgk4\" (UID: \"16718254-ede5-4551-a80a-d5a80873630b\") " pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.900799 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0343305-d3cf-4375-b5c8-1cd5c5f6054c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e0343305-d3cf-4375-b5c8-1cd5c5f6054c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.900863 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-dns-svc\") pod \"dnsmasq-dns-88794ccbc-bcgk4\" (UID: \"16718254-ede5-4551-a80a-d5a80873630b\") " pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.900895 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hs78d\" (UniqueName: \"kubernetes.io/projected/e0343305-d3cf-4375-b5c8-1cd5c5f6054c-kube-api-access-hs78d\") pod \"nova-cell1-novncproxy-0\" (UID: \"e0343305-d3cf-4375-b5c8-1cd5c5f6054c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 06:59:29 crc kubenswrapper[4632]: I1201 06:59:29.900933 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/e0343305-d3cf-4375-b5c8-1cd5c5f6054c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e0343305-d3cf-4375-b5c8-1cd5c5f6054c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.003999 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-ovsdbserver-sb\") pod \"dnsmasq-dns-88794ccbc-bcgk4\" (UID: \"16718254-ede5-4551-a80a-d5a80873630b\") " pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.004102 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-config\") pod \"dnsmasq-dns-88794ccbc-bcgk4\" (UID: \"16718254-ede5-4551-a80a-d5a80873630b\") " pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.004172 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0343305-d3cf-4375-b5c8-1cd5c5f6054c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e0343305-d3cf-4375-b5c8-1cd5c5f6054c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.004331 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-dns-svc\") pod \"dnsmasq-dns-88794ccbc-bcgk4\" (UID: \"16718254-ede5-4551-a80a-d5a80873630b\") " pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.005517 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-config\") pod \"dnsmasq-dns-88794ccbc-bcgk4\" (UID: \"16718254-ede5-4551-a80a-d5a80873630b\") " pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.005710 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hs78d\" (UniqueName: \"kubernetes.io/projected/e0343305-d3cf-4375-b5c8-1cd5c5f6054c-kube-api-access-hs78d\") pod \"nova-cell1-novncproxy-0\" (UID: \"e0343305-d3cf-4375-b5c8-1cd5c5f6054c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.005808 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0343305-d3cf-4375-b5c8-1cd5c5f6054c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e0343305-d3cf-4375-b5c8-1cd5c5f6054c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.005867 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rj79j\" (UniqueName: \"kubernetes.io/projected/16718254-ede5-4551-a80a-d5a80873630b-kube-api-access-rj79j\") pod \"dnsmasq-dns-88794ccbc-bcgk4\" (UID: \"16718254-ede5-4551-a80a-d5a80873630b\") " pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.005930 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-dns-swift-storage-0\") pod \"dnsmasq-dns-88794ccbc-bcgk4\" (UID: 
\"16718254-ede5-4551-a80a-d5a80873630b\") " pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.005978 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-ovsdbserver-nb\") pod \"dnsmasq-dns-88794ccbc-bcgk4\" (UID: \"16718254-ede5-4551-a80a-d5a80873630b\") " pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.007766 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-ovsdbserver-nb\") pod \"dnsmasq-dns-88794ccbc-bcgk4\" (UID: \"16718254-ede5-4551-a80a-d5a80873630b\") " pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.008991 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-dns-svc\") pod \"dnsmasq-dns-88794ccbc-bcgk4\" (UID: \"16718254-ede5-4551-a80a-d5a80873630b\") " pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.016574 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-dns-swift-storage-0\") pod \"dnsmasq-dns-88794ccbc-bcgk4\" (UID: \"16718254-ede5-4551-a80a-d5a80873630b\") " pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.020213 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0343305-d3cf-4375-b5c8-1cd5c5f6054c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e0343305-d3cf-4375-b5c8-1cd5c5f6054c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.020368 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-ovsdbserver-sb\") pod \"dnsmasq-dns-88794ccbc-bcgk4\" (UID: \"16718254-ede5-4551-a80a-d5a80873630b\") " pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.033468 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0343305-d3cf-4375-b5c8-1cd5c5f6054c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e0343305-d3cf-4375-b5c8-1cd5c5f6054c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.040629 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hs78d\" (UniqueName: \"kubernetes.io/projected/e0343305-d3cf-4375-b5c8-1cd5c5f6054c-kube-api-access-hs78d\") pod \"nova-cell1-novncproxy-0\" (UID: \"e0343305-d3cf-4375-b5c8-1cd5c5f6054c\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.047487 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rj79j\" (UniqueName: \"kubernetes.io/projected/16718254-ede5-4551-a80a-d5a80873630b-kube-api-access-rj79j\") pod \"dnsmasq-dns-88794ccbc-bcgk4\" (UID: \"16718254-ede5-4551-a80a-d5a80873630b\") " pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 
06:59:30.058630 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.087944 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.166951 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-hdlnh"] Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.332973 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.373451 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-d2q4m"] Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.375123 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-d2q4m" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.377778 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.378173 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.403698 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-d2q4m"] Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.433789 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.511548 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 06:59:30 crc kubenswrapper[4632]: W1201 06:59:30.513841 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbbc60b8c_82bf_4773_95e1_e259a9aba512.slice/crio-2546a3332cc6520e917000ee980ffb6124258da23dc959e532139e28583e578d WatchSource:0}: Error finding container 2546a3332cc6520e917000ee980ffb6124258da23dc959e532139e28583e578d: Status 404 returned error can't find the container with id 2546a3332cc6520e917000ee980ffb6124258da23dc959e532139e28583e578d Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.525809 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85-scripts\") pod \"nova-cell1-conductor-db-sync-d2q4m\" (UID: \"a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85\") " pod="openstack/nova-cell1-conductor-db-sync-d2q4m" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.525960 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-d2q4m\" (UID: \"a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85\") " pod="openstack/nova-cell1-conductor-db-sync-d2q4m" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.526134 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85-config-data\") pod \"nova-cell1-conductor-db-sync-d2q4m\" (UID: \"a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85\") " 
pod="openstack/nova-cell1-conductor-db-sync-d2q4m" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.526162 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qjnv\" (UniqueName: \"kubernetes.io/projected/a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85-kube-api-access-7qjnv\") pod \"nova-cell1-conductor-db-sync-d2q4m\" (UID: \"a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85\") " pod="openstack/nova-cell1-conductor-db-sync-d2q4m" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.596103 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"152bc7eb-abfb-41dc-9f15-afcae2e96770","Type":"ContainerStarted","Data":"bc4b705b7f681999100e4032e63aff51d56dfdc2a7872b97b546d606944a11c4"} Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.597629 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7e6b455e-f763-4e20-8bc5-a5a3e7357970","Type":"ContainerStarted","Data":"fe4fc9ea316fcdaf1d410f6d115376fd90758651b41790f842b0ae870888ae62"} Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.598864 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"bbc60b8c-82bf-4773-95e1-e259a9aba512","Type":"ContainerStarted","Data":"2546a3332cc6520e917000ee980ffb6124258da23dc959e532139e28583e578d"} Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.600214 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-hdlnh" event={"ID":"80cde8b2-5981-4e87-a781-db78ead1e0e1","Type":"ContainerStarted","Data":"274a9456e3bd82048198f00dfa297466f4dca496059f6fdf2f21e8fc184a0edd"} Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.600271 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-hdlnh" event={"ID":"80cde8b2-5981-4e87-a781-db78ead1e0e1","Type":"ContainerStarted","Data":"63523c81161e0c83e6dbc3ae8472c71689730e79664264e5a5803901f2dfbef3"} Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.613457 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-hdlnh" podStartSLOduration=1.61343769 podStartE2EDuration="1.61343769s" podCreationTimestamp="2025-12-01 06:59:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:59:30.613021354 +0000 UTC m=+980.178034318" watchObservedRunningTime="2025-12-01 06:59:30.61343769 +0000 UTC m=+980.178450664" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.627771 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85-config-data\") pod \"nova-cell1-conductor-db-sync-d2q4m\" (UID: \"a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85\") " pod="openstack/nova-cell1-conductor-db-sync-d2q4m" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.627823 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qjnv\" (UniqueName: \"kubernetes.io/projected/a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85-kube-api-access-7qjnv\") pod \"nova-cell1-conductor-db-sync-d2q4m\" (UID: \"a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85\") " pod="openstack/nova-cell1-conductor-db-sync-d2q4m" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.627950 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85-scripts\") pod \"nova-cell1-conductor-db-sync-d2q4m\" (UID: \"a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85\") " pod="openstack/nova-cell1-conductor-db-sync-d2q4m" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.628056 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-d2q4m\" (UID: \"a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85\") " pod="openstack/nova-cell1-conductor-db-sync-d2q4m" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.635648 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85-scripts\") pod \"nova-cell1-conductor-db-sync-d2q4m\" (UID: \"a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85\") " pod="openstack/nova-cell1-conductor-db-sync-d2q4m" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.636046 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-d2q4m\" (UID: \"a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85\") " pod="openstack/nova-cell1-conductor-db-sync-d2q4m" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.640749 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85-config-data\") pod \"nova-cell1-conductor-db-sync-d2q4m\" (UID: \"a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85\") " pod="openstack/nova-cell1-conductor-db-sync-d2q4m" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.647310 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qjnv\" (UniqueName: \"kubernetes.io/projected/a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85-kube-api-access-7qjnv\") pod \"nova-cell1-conductor-db-sync-d2q4m\" (UID: \"a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85\") " pod="openstack/nova-cell1-conductor-db-sync-d2q4m" Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.665820 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-88794ccbc-bcgk4"] Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.681568 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 06:59:30 crc kubenswrapper[4632]: I1201 06:59:30.711711 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-d2q4m" Dec 01 06:59:31 crc kubenswrapper[4632]: I1201 06:59:31.201129 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-d2q4m"] Dec 01 06:59:31 crc kubenswrapper[4632]: W1201 06:59:31.205437 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda1ce8f92_e8d7_4a6f_8d9c_bca1c7061a85.slice/crio-4b76532905a47934331bc1f770f5bd6431b6238ccf56f6a28fa33604f8cac3aa WatchSource:0}: Error finding container 4b76532905a47934331bc1f770f5bd6431b6238ccf56f6a28fa33604f8cac3aa: Status 404 returned error can't find the container with id 4b76532905a47934331bc1f770f5bd6431b6238ccf56f6a28fa33604f8cac3aa Dec 01 06:59:31 crc kubenswrapper[4632]: I1201 06:59:31.666015 4632 generic.go:334] "Generic (PLEG): container finished" podID="16718254-ede5-4551-a80a-d5a80873630b" containerID="1a374aceb207429da86d3135f72671638b1993342a61c9acdc6a9121320a98b2" exitCode=0 Dec 01 06:59:31 crc kubenswrapper[4632]: I1201 06:59:31.666343 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" event={"ID":"16718254-ede5-4551-a80a-d5a80873630b","Type":"ContainerDied","Data":"1a374aceb207429da86d3135f72671638b1993342a61c9acdc6a9121320a98b2"} Dec 01 06:59:31 crc kubenswrapper[4632]: I1201 06:59:31.666397 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" event={"ID":"16718254-ede5-4551-a80a-d5a80873630b","Type":"ContainerStarted","Data":"1fbd64ca2b27c84be76c82d0a76d7e1321e0c78f919cf123b8220d341426a3ee"} Dec 01 06:59:31 crc kubenswrapper[4632]: I1201 06:59:31.691332 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e0343305-d3cf-4375-b5c8-1cd5c5f6054c","Type":"ContainerStarted","Data":"b71b42c57b17b65fc7b01fb0b63fe768c5002f24c010e44956585c0117acd46e"} Dec 01 06:59:31 crc kubenswrapper[4632]: I1201 06:59:31.740030 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-d2q4m" event={"ID":"a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85","Type":"ContainerStarted","Data":"e445f124ec56419305d784c1676a9f4c6dc844e88ab40d2431e04b07f2507011"} Dec 01 06:59:31 crc kubenswrapper[4632]: I1201 06:59:31.740094 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-d2q4m" event={"ID":"a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85","Type":"ContainerStarted","Data":"4b76532905a47934331bc1f770f5bd6431b6238ccf56f6a28fa33604f8cac3aa"} Dec 01 06:59:31 crc kubenswrapper[4632]: I1201 06:59:31.797251 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-d2q4m" podStartSLOduration=1.7972329500000002 podStartE2EDuration="1.79723295s" podCreationTimestamp="2025-12-01 06:59:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:59:31.763770498 +0000 UTC m=+981.328783471" watchObservedRunningTime="2025-12-01 06:59:31.79723295 +0000 UTC m=+981.362245923" Dec 01 06:59:32 crc kubenswrapper[4632]: I1201 06:59:32.769379 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" Dec 01 06:59:32 crc kubenswrapper[4632]: I1201 06:59:32.769422 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" 
event={"ID":"16718254-ede5-4551-a80a-d5a80873630b","Type":"ContainerStarted","Data":"6b28c9f816ee449d8b745a1b275b1f05f9df55bd85779c8188732bcbd4840b5e"} Dec 01 06:59:32 crc kubenswrapper[4632]: I1201 06:59:32.770071 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" podStartSLOduration=3.770049248 podStartE2EDuration="3.770049248s" podCreationTimestamp="2025-12-01 06:59:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:59:32.763817322 +0000 UTC m=+982.328830296" watchObservedRunningTime="2025-12-01 06:59:32.770049248 +0000 UTC m=+982.335062211" Dec 01 06:59:33 crc kubenswrapper[4632]: I1201 06:59:33.272767 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 06:59:33 crc kubenswrapper[4632]: I1201 06:59:33.280743 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 06:59:34 crc kubenswrapper[4632]: I1201 06:59:34.782679 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"152bc7eb-abfb-41dc-9f15-afcae2e96770","Type":"ContainerStarted","Data":"21e8fea19703371500e93b935b1b5eea8f36b68b97ef22d7c67eba861404f788"} Dec 01 06:59:34 crc kubenswrapper[4632]: I1201 06:59:34.783407 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"152bc7eb-abfb-41dc-9f15-afcae2e96770","Type":"ContainerStarted","Data":"9e7fd3630b04975a3b0271c2caadac1b6d4def926c8bd0295757f96c72289969"} Dec 01 06:59:34 crc kubenswrapper[4632]: I1201 06:59:34.785246 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e0343305-d3cf-4375-b5c8-1cd5c5f6054c","Type":"ContainerStarted","Data":"c74bee9c23ee3b3e12720bd2f58c664b9e05d2dff972faaf06ecf55ac636cec9"} Dec 01 06:59:34 crc kubenswrapper[4632]: I1201 06:59:34.785445 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="e0343305-d3cf-4375-b5c8-1cd5c5f6054c" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://c74bee9c23ee3b3e12720bd2f58c664b9e05d2dff972faaf06ecf55ac636cec9" gracePeriod=30 Dec 01 06:59:34 crc kubenswrapper[4632]: I1201 06:59:34.790306 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7e6b455e-f763-4e20-8bc5-a5a3e7357970","Type":"ContainerStarted","Data":"92b389fad9f5bdd442e682098490461d838b5bf0e859af3e556e2f559554f446"} Dec 01 06:59:34 crc kubenswrapper[4632]: I1201 06:59:34.804912 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.536533909 podStartE2EDuration="5.804896225s" podCreationTimestamp="2025-12-01 06:59:29 +0000 UTC" firstStartedPulling="2025-12-01 06:59:30.393811427 +0000 UTC m=+979.958824400" lastFinishedPulling="2025-12-01 06:59:33.662173743 +0000 UTC m=+983.227186716" observedRunningTime="2025-12-01 06:59:34.800893303 +0000 UTC m=+984.365906276" watchObservedRunningTime="2025-12-01 06:59:34.804896225 +0000 UTC m=+984.369909198" Dec 01 06:59:34 crc kubenswrapper[4632]: I1201 06:59:34.840687 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.874728003 podStartE2EDuration="5.840676168s" podCreationTimestamp="2025-12-01 06:59:29 +0000 UTC" firstStartedPulling="2025-12-01 
06:59:30.695672304 +0000 UTC m=+980.260685277" lastFinishedPulling="2025-12-01 06:59:33.661620468 +0000 UTC m=+983.226633442" observedRunningTime="2025-12-01 06:59:34.830397249 +0000 UTC m=+984.395410221" watchObservedRunningTime="2025-12-01 06:59:34.840676168 +0000 UTC m=+984.405689142" Dec 01 06:59:34 crc kubenswrapper[4632]: I1201 06:59:34.873251 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 01 06:59:34 crc kubenswrapper[4632]: I1201 06:59:34.885223 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.6521429789999997 podStartE2EDuration="5.885199054s" podCreationTimestamp="2025-12-01 06:59:29 +0000 UTC" firstStartedPulling="2025-12-01 06:59:30.432794453 +0000 UTC m=+979.997807426" lastFinishedPulling="2025-12-01 06:59:33.665850528 +0000 UTC m=+983.230863501" observedRunningTime="2025-12-01 06:59:34.844164598 +0000 UTC m=+984.409177572" watchObservedRunningTime="2025-12-01 06:59:34.885199054 +0000 UTC m=+984.450212027" Dec 01 06:59:35 crc kubenswrapper[4632]: I1201 06:59:35.059875 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 01 06:59:35 crc kubenswrapper[4632]: I1201 06:59:35.807164 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"bbc60b8c-82bf-4773-95e1-e259a9aba512","Type":"ContainerStarted","Data":"40f11718a781650c5b894b97476c8e4740c015ef117cb34ff325eeefc0a628d3"} Dec 01 06:59:35 crc kubenswrapper[4632]: I1201 06:59:35.807669 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"bbc60b8c-82bf-4773-95e1-e259a9aba512","Type":"ContainerStarted","Data":"f70514c4594551c1f3e65b356326abdcab8db3af1a1136783163104763bdc579"} Dec 01 06:59:35 crc kubenswrapper[4632]: I1201 06:59:35.807602 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="bbc60b8c-82bf-4773-95e1-e259a9aba512" containerName="nova-metadata-metadata" containerID="cri-o://40f11718a781650c5b894b97476c8e4740c015ef117cb34ff325eeefc0a628d3" gracePeriod=30 Dec 01 06:59:35 crc kubenswrapper[4632]: I1201 06:59:35.807261 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="bbc60b8c-82bf-4773-95e1-e259a9aba512" containerName="nova-metadata-log" containerID="cri-o://f70514c4594551c1f3e65b356326abdcab8db3af1a1136783163104763bdc579" gracePeriod=30 Dec 01 06:59:35 crc kubenswrapper[4632]: I1201 06:59:35.809265 4632 generic.go:334] "Generic (PLEG): container finished" podID="a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85" containerID="e445f124ec56419305d784c1676a9f4c6dc844e88ab40d2431e04b07f2507011" exitCode=0 Dec 01 06:59:35 crc kubenswrapper[4632]: I1201 06:59:35.809325 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-d2q4m" event={"ID":"a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85","Type":"ContainerDied","Data":"e445f124ec56419305d784c1676a9f4c6dc844e88ab40d2431e04b07f2507011"} Dec 01 06:59:35 crc kubenswrapper[4632]: I1201 06:59:35.848758 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.200634171 podStartE2EDuration="6.84874761s" podCreationTimestamp="2025-12-01 06:59:29 +0000 UTC" firstStartedPulling="2025-12-01 06:59:30.515791455 +0000 UTC m=+980.080804428" lastFinishedPulling="2025-12-01 06:59:35.163904893 +0000 
UTC m=+984.728917867" observedRunningTime="2025-12-01 06:59:35.834326427 +0000 UTC m=+985.399339390" watchObservedRunningTime="2025-12-01 06:59:35.84874761 +0000 UTC m=+985.413760584" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.372066 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.411947 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bbc60b8c-82bf-4773-95e1-e259a9aba512-logs\") pod \"bbc60b8c-82bf-4773-95e1-e259a9aba512\" (UID: \"bbc60b8c-82bf-4773-95e1-e259a9aba512\") " Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.412032 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbc60b8c-82bf-4773-95e1-e259a9aba512-combined-ca-bundle\") pod \"bbc60b8c-82bf-4773-95e1-e259a9aba512\" (UID: \"bbc60b8c-82bf-4773-95e1-e259a9aba512\") " Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.412082 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6pq\" (UniqueName: \"kubernetes.io/projected/bbc60b8c-82bf-4773-95e1-e259a9aba512-kube-api-access-sb6pq\") pod \"bbc60b8c-82bf-4773-95e1-e259a9aba512\" (UID: \"bbc60b8c-82bf-4773-95e1-e259a9aba512\") " Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.412245 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbc60b8c-82bf-4773-95e1-e259a9aba512-config-data\") pod \"bbc60b8c-82bf-4773-95e1-e259a9aba512\" (UID: \"bbc60b8c-82bf-4773-95e1-e259a9aba512\") " Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.413014 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bbc60b8c-82bf-4773-95e1-e259a9aba512-logs" (OuterVolumeSpecName: "logs") pod "bbc60b8c-82bf-4773-95e1-e259a9aba512" (UID: "bbc60b8c-82bf-4773-95e1-e259a9aba512"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.417321 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbc60b8c-82bf-4773-95e1-e259a9aba512-kube-api-access-sb6pq" (OuterVolumeSpecName: "kube-api-access-sb6pq") pod "bbc60b8c-82bf-4773-95e1-e259a9aba512" (UID: "bbc60b8c-82bf-4773-95e1-e259a9aba512"). InnerVolumeSpecName "kube-api-access-sb6pq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.436005 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbc60b8c-82bf-4773-95e1-e259a9aba512-config-data" (OuterVolumeSpecName: "config-data") pod "bbc60b8c-82bf-4773-95e1-e259a9aba512" (UID: "bbc60b8c-82bf-4773-95e1-e259a9aba512"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.436619 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbc60b8c-82bf-4773-95e1-e259a9aba512-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bbc60b8c-82bf-4773-95e1-e259a9aba512" (UID: "bbc60b8c-82bf-4773-95e1-e259a9aba512"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.514260 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbc60b8c-82bf-4773-95e1-e259a9aba512-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.514298 4632 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bbc60b8c-82bf-4773-95e1-e259a9aba512-logs\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.514309 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbc60b8c-82bf-4773-95e1-e259a9aba512-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.514322 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6pq\" (UniqueName: \"kubernetes.io/projected/bbc60b8c-82bf-4773-95e1-e259a9aba512-kube-api-access-sb6pq\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.824283 4632 generic.go:334] "Generic (PLEG): container finished" podID="bbc60b8c-82bf-4773-95e1-e259a9aba512" containerID="40f11718a781650c5b894b97476c8e4740c015ef117cb34ff325eeefc0a628d3" exitCode=0 Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.824317 4632 generic.go:334] "Generic (PLEG): container finished" podID="bbc60b8c-82bf-4773-95e1-e259a9aba512" containerID="f70514c4594551c1f3e65b356326abdcab8db3af1a1136783163104763bdc579" exitCode=143 Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.824410 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.824867 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"bbc60b8c-82bf-4773-95e1-e259a9aba512","Type":"ContainerDied","Data":"40f11718a781650c5b894b97476c8e4740c015ef117cb34ff325eeefc0a628d3"} Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.824932 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"bbc60b8c-82bf-4773-95e1-e259a9aba512","Type":"ContainerDied","Data":"f70514c4594551c1f3e65b356326abdcab8db3af1a1136783163104763bdc579"} Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.824946 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"bbc60b8c-82bf-4773-95e1-e259a9aba512","Type":"ContainerDied","Data":"2546a3332cc6520e917000ee980ffb6124258da23dc959e532139e28583e578d"} Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.824968 4632 scope.go:117] "RemoveContainer" containerID="40f11718a781650c5b894b97476c8e4740c015ef117cb34ff325eeefc0a628d3" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.827044 4632 generic.go:334] "Generic (PLEG): container finished" podID="80cde8b2-5981-4e87-a781-db78ead1e0e1" containerID="274a9456e3bd82048198f00dfa297466f4dca496059f6fdf2f21e8fc184a0edd" exitCode=0 Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.827727 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-hdlnh" event={"ID":"80cde8b2-5981-4e87-a781-db78ead1e0e1","Type":"ContainerDied","Data":"274a9456e3bd82048198f00dfa297466f4dca496059f6fdf2f21e8fc184a0edd"} Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.854066 4632 scope.go:117] "RemoveContainer" 
containerID="f70514c4594551c1f3e65b356326abdcab8db3af1a1136783163104763bdc579" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.869877 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.882819 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.899451 4632 scope.go:117] "RemoveContainer" containerID="40f11718a781650c5b894b97476c8e4740c015ef117cb34ff325eeefc0a628d3" Dec 01 06:59:36 crc kubenswrapper[4632]: E1201 06:59:36.906079 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40f11718a781650c5b894b97476c8e4740c015ef117cb34ff325eeefc0a628d3\": container with ID starting with 40f11718a781650c5b894b97476c8e4740c015ef117cb34ff325eeefc0a628d3 not found: ID does not exist" containerID="40f11718a781650c5b894b97476c8e4740c015ef117cb34ff325eeefc0a628d3" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.906112 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40f11718a781650c5b894b97476c8e4740c015ef117cb34ff325eeefc0a628d3"} err="failed to get container status \"40f11718a781650c5b894b97476c8e4740c015ef117cb34ff325eeefc0a628d3\": rpc error: code = NotFound desc = could not find container \"40f11718a781650c5b894b97476c8e4740c015ef117cb34ff325eeefc0a628d3\": container with ID starting with 40f11718a781650c5b894b97476c8e4740c015ef117cb34ff325eeefc0a628d3 not found: ID does not exist" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.906137 4632 scope.go:117] "RemoveContainer" containerID="f70514c4594551c1f3e65b356326abdcab8db3af1a1136783163104763bdc579" Dec 01 06:59:36 crc kubenswrapper[4632]: E1201 06:59:36.906476 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f70514c4594551c1f3e65b356326abdcab8db3af1a1136783163104763bdc579\": container with ID starting with f70514c4594551c1f3e65b356326abdcab8db3af1a1136783163104763bdc579 not found: ID does not exist" containerID="f70514c4594551c1f3e65b356326abdcab8db3af1a1136783163104763bdc579" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.906500 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f70514c4594551c1f3e65b356326abdcab8db3af1a1136783163104763bdc579"} err="failed to get container status \"f70514c4594551c1f3e65b356326abdcab8db3af1a1136783163104763bdc579\": rpc error: code = NotFound desc = could not find container \"f70514c4594551c1f3e65b356326abdcab8db3af1a1136783163104763bdc579\": container with ID starting with f70514c4594551c1f3e65b356326abdcab8db3af1a1136783163104763bdc579 not found: ID does not exist" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.906516 4632 scope.go:117] "RemoveContainer" containerID="40f11718a781650c5b894b97476c8e4740c015ef117cb34ff325eeefc0a628d3" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.906702 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40f11718a781650c5b894b97476c8e4740c015ef117cb34ff325eeefc0a628d3"} err="failed to get container status \"40f11718a781650c5b894b97476c8e4740c015ef117cb34ff325eeefc0a628d3\": rpc error: code = NotFound desc = could not find container \"40f11718a781650c5b894b97476c8e4740c015ef117cb34ff325eeefc0a628d3\": container with ID starting with 
40f11718a781650c5b894b97476c8e4740c015ef117cb34ff325eeefc0a628d3 not found: ID does not exist" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.906722 4632 scope.go:117] "RemoveContainer" containerID="f70514c4594551c1f3e65b356326abdcab8db3af1a1136783163104763bdc579" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.906875 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f70514c4594551c1f3e65b356326abdcab8db3af1a1136783163104763bdc579"} err="failed to get container status \"f70514c4594551c1f3e65b356326abdcab8db3af1a1136783163104763bdc579\": rpc error: code = NotFound desc = could not find container \"f70514c4594551c1f3e65b356326abdcab8db3af1a1136783163104763bdc579\": container with ID starting with f70514c4594551c1f3e65b356326abdcab8db3af1a1136783163104763bdc579 not found: ID does not exist" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.914370 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 01 06:59:36 crc kubenswrapper[4632]: E1201 06:59:36.914862 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbc60b8c-82bf-4773-95e1-e259a9aba512" containerName="nova-metadata-log" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.914882 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbc60b8c-82bf-4773-95e1-e259a9aba512" containerName="nova-metadata-log" Dec 01 06:59:36 crc kubenswrapper[4632]: E1201 06:59:36.914918 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbc60b8c-82bf-4773-95e1-e259a9aba512" containerName="nova-metadata-metadata" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.914924 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbc60b8c-82bf-4773-95e1-e259a9aba512" containerName="nova-metadata-metadata" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.915142 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbc60b8c-82bf-4773-95e1-e259a9aba512" containerName="nova-metadata-metadata" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.915162 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbc60b8c-82bf-4773-95e1-e259a9aba512" containerName="nova-metadata-log" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.916229 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.918426 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.918631 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 01 06:59:36 crc kubenswrapper[4632]: I1201 06:59:36.923505 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.024154 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a0c8a4c5-5ccf-4167-89ba-9c876d889f84\") " pod="openstack/nova-metadata-0" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.024720 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-config-data\") pod \"nova-metadata-0\" (UID: \"a0c8a4c5-5ccf-4167-89ba-9c876d889f84\") " pod="openstack/nova-metadata-0" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.024764 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-logs\") pod \"nova-metadata-0\" (UID: \"a0c8a4c5-5ccf-4167-89ba-9c876d889f84\") " pod="openstack/nova-metadata-0" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.024796 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a0c8a4c5-5ccf-4167-89ba-9c876d889f84\") " pod="openstack/nova-metadata-0" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.025010 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99ldv\" (UniqueName: \"kubernetes.io/projected/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-kube-api-access-99ldv\") pod \"nova-metadata-0\" (UID: \"a0c8a4c5-5ccf-4167-89ba-9c876d889f84\") " pod="openstack/nova-metadata-0" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.125921 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a0c8a4c5-5ccf-4167-89ba-9c876d889f84\") " pod="openstack/nova-metadata-0" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.126231 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-config-data\") pod \"nova-metadata-0\" (UID: \"a0c8a4c5-5ccf-4167-89ba-9c876d889f84\") " pod="openstack/nova-metadata-0" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.126277 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-logs\") pod \"nova-metadata-0\" (UID: \"a0c8a4c5-5ccf-4167-89ba-9c876d889f84\") " pod="openstack/nova-metadata-0" Dec 01 
06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.126301 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a0c8a4c5-5ccf-4167-89ba-9c876d889f84\") " pod="openstack/nova-metadata-0" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.126729 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-logs\") pod \"nova-metadata-0\" (UID: \"a0c8a4c5-5ccf-4167-89ba-9c876d889f84\") " pod="openstack/nova-metadata-0" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.127117 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99ldv\" (UniqueName: \"kubernetes.io/projected/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-kube-api-access-99ldv\") pod \"nova-metadata-0\" (UID: \"a0c8a4c5-5ccf-4167-89ba-9c876d889f84\") " pod="openstack/nova-metadata-0" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.129656 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-config-data\") pod \"nova-metadata-0\" (UID: \"a0c8a4c5-5ccf-4167-89ba-9c876d889f84\") " pod="openstack/nova-metadata-0" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.136107 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a0c8a4c5-5ccf-4167-89ba-9c876d889f84\") " pod="openstack/nova-metadata-0" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.136381 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a0c8a4c5-5ccf-4167-89ba-9c876d889f84\") " pod="openstack/nova-metadata-0" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.140815 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99ldv\" (UniqueName: \"kubernetes.io/projected/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-kube-api-access-99ldv\") pod \"nova-metadata-0\" (UID: \"a0c8a4c5-5ccf-4167-89ba-9c876d889f84\") " pod="openstack/nova-metadata-0" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.192020 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-d2q4m" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.270053 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.332215 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85-scripts\") pod \"a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85\" (UID: \"a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85\") " Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.332287 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85-combined-ca-bundle\") pod \"a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85\" (UID: \"a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85\") " Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.332327 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85-config-data\") pod \"a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85\" (UID: \"a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85\") " Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.332527 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7qjnv\" (UniqueName: \"kubernetes.io/projected/a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85-kube-api-access-7qjnv\") pod \"a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85\" (UID: \"a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85\") " Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.337606 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85-kube-api-access-7qjnv" (OuterVolumeSpecName: "kube-api-access-7qjnv") pod "a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85" (UID: "a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85"). InnerVolumeSpecName "kube-api-access-7qjnv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.338208 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85-scripts" (OuterVolumeSpecName: "scripts") pod "a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85" (UID: "a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.360405 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85-config-data" (OuterVolumeSpecName: "config-data") pod "a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85" (UID: "a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.366593 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85" (UID: "a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.436193 4632 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.436226 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.436240 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.436251 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7qjnv\" (UniqueName: \"kubernetes.io/projected/a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85-kube-api-access-7qjnv\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.681174 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 06:59:37 crc kubenswrapper[4632]: W1201 06:59:37.684617 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda0c8a4c5_5ccf_4167_89ba_9c876d889f84.slice/crio-5d454ba9fa76bf1e0bee3df93377fac7392c0f75c4ddfd972b62e6757ef25067 WatchSource:0}: Error finding container 5d454ba9fa76bf1e0bee3df93377fac7392c0f75c4ddfd972b62e6757ef25067: Status 404 returned error can't find the container with id 5d454ba9fa76bf1e0bee3df93377fac7392c0f75c4ddfd972b62e6757ef25067 Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.840564 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a0c8a4c5-5ccf-4167-89ba-9c876d889f84","Type":"ContainerStarted","Data":"186055e6389eabbd01ef7e07282a398908127d9e46f401887934a4f760a065d8"} Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.840635 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a0c8a4c5-5ccf-4167-89ba-9c876d889f84","Type":"ContainerStarted","Data":"5d454ba9fa76bf1e0bee3df93377fac7392c0f75c4ddfd972b62e6757ef25067"} Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.843011 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-d2q4m" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.843082 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-d2q4m" event={"ID":"a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85","Type":"ContainerDied","Data":"4b76532905a47934331bc1f770f5bd6431b6238ccf56f6a28fa33604f8cac3aa"} Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.843136 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4b76532905a47934331bc1f770f5bd6431b6238ccf56f6a28fa33604f8cac3aa" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.920684 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 01 06:59:37 crc kubenswrapper[4632]: E1201 06:59:37.921390 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85" containerName="nova-cell1-conductor-db-sync" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.921415 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85" containerName="nova-cell1-conductor-db-sync" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.921672 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85" containerName="nova-cell1-conductor-db-sync" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.922563 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.926051 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.945448 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.947599 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da84cfe8-1321-40a1-a05b-14194e1e7d48-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"da84cfe8-1321-40a1-a05b-14194e1e7d48\") " pod="openstack/nova-cell1-conductor-0" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.947779 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49rgd\" (UniqueName: \"kubernetes.io/projected/da84cfe8-1321-40a1-a05b-14194e1e7d48-kube-api-access-49rgd\") pod \"nova-cell1-conductor-0\" (UID: \"da84cfe8-1321-40a1-a05b-14194e1e7d48\") " pod="openstack/nova-cell1-conductor-0" Dec 01 06:59:37 crc kubenswrapper[4632]: I1201 06:59:37.947847 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da84cfe8-1321-40a1-a05b-14194e1e7d48-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"da84cfe8-1321-40a1-a05b-14194e1e7d48\") " pod="openstack/nova-cell1-conductor-0" Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 06:59:38.051399 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49rgd\" (UniqueName: \"kubernetes.io/projected/da84cfe8-1321-40a1-a05b-14194e1e7d48-kube-api-access-49rgd\") pod \"nova-cell1-conductor-0\" (UID: \"da84cfe8-1321-40a1-a05b-14194e1e7d48\") " pod="openstack/nova-cell1-conductor-0" Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 
06:59:38.051538 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da84cfe8-1321-40a1-a05b-14194e1e7d48-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"da84cfe8-1321-40a1-a05b-14194e1e7d48\") " pod="openstack/nova-cell1-conductor-0"
Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 06:59:38.051841 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da84cfe8-1321-40a1-a05b-14194e1e7d48-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"da84cfe8-1321-40a1-a05b-14194e1e7d48\") " pod="openstack/nova-cell1-conductor-0"
Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 06:59:38.056965 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/da84cfe8-1321-40a1-a05b-14194e1e7d48-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"da84cfe8-1321-40a1-a05b-14194e1e7d48\") " pod="openstack/nova-cell1-conductor-0"
Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 06:59:38.058536 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/da84cfe8-1321-40a1-a05b-14194e1e7d48-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"da84cfe8-1321-40a1-a05b-14194e1e7d48\") " pod="openstack/nova-cell1-conductor-0"
Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 06:59:38.070391 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49rgd\" (UniqueName: \"kubernetes.io/projected/da84cfe8-1321-40a1-a05b-14194e1e7d48-kube-api-access-49rgd\") pod \"nova-cell1-conductor-0\" (UID: \"da84cfe8-1321-40a1-a05b-14194e1e7d48\") " pod="openstack/nova-cell1-conductor-0"
Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 06:59:38.178176 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-hdlnh"
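The sequence above is the kubelet volume manager bringing up the volumes of nova-cell1-conductor-0 in order: VerifyControllerAttachedVolume, then MountVolume started, then MountVolume.SetUp succeeded for the two secret-backed volumes and the projected token volume. As an illustrative aside, a minimal Go sketch (client-go types) of how the two secret volumes would be declared in the pod spec; the Secret name "nova-cell1-conductor-config-data" is taken from the reflector entry above, while using "combined-ca-bundle" as a Secret name is an assumption of the sketch, not something this log confirms.

package main

import (
	"fmt"

	v1 "k8s.io/api/core/v1"
)

// Sketch only: the secret-backed volumes that produce the mount events above.
func conductorVolumes() []v1.Volume {
	return []v1.Volume{
		{
			Name: "config-data", // matches the UniqueName suffix in the log
			VolumeSource: v1.VolumeSource{
				Secret: &v1.SecretVolumeSource{SecretName: "nova-cell1-conductor-config-data"},
			},
		},
		{
			Name: "combined-ca-bundle",
			VolumeSource: v1.VolumeSource{
				Secret: &v1.SecretVolumeSource{SecretName: "combined-ca-bundle"}, // assumed name
			},
		},
	}
}

func main() {
	for _, vol := range conductorVolumes() {
		fmt.Println(vol.Name)
	}
}

The third volume, kube-api-access-49rgd, is the service-account token volume the control plane injects automatically, which is why it appears under kubernetes.io/projected rather than kubernetes.io/secret.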
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-hdlnh" Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 06:59:38.254911 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80cde8b2-5981-4e87-a781-db78ead1e0e1-combined-ca-bundle\") pod \"80cde8b2-5981-4e87-a781-db78ead1e0e1\" (UID: \"80cde8b2-5981-4e87-a781-db78ead1e0e1\") " Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 06:59:38.255009 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80cde8b2-5981-4e87-a781-db78ead1e0e1-config-data\") pod \"80cde8b2-5981-4e87-a781-db78ead1e0e1\" (UID: \"80cde8b2-5981-4e87-a781-db78ead1e0e1\") " Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 06:59:38.255156 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80cde8b2-5981-4e87-a781-db78ead1e0e1-scripts\") pod \"80cde8b2-5981-4e87-a781-db78ead1e0e1\" (UID: \"80cde8b2-5981-4e87-a781-db78ead1e0e1\") " Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 06:59:38.255250 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kpvbd\" (UniqueName: \"kubernetes.io/projected/80cde8b2-5981-4e87-a781-db78ead1e0e1-kube-api-access-kpvbd\") pod \"80cde8b2-5981-4e87-a781-db78ead1e0e1\" (UID: \"80cde8b2-5981-4e87-a781-db78ead1e0e1\") " Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 06:59:38.259046 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80cde8b2-5981-4e87-a781-db78ead1e0e1-scripts" (OuterVolumeSpecName: "scripts") pod "80cde8b2-5981-4e87-a781-db78ead1e0e1" (UID: "80cde8b2-5981-4e87-a781-db78ead1e0e1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 06:59:38.262092 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80cde8b2-5981-4e87-a781-db78ead1e0e1-kube-api-access-kpvbd" (OuterVolumeSpecName: "kube-api-access-kpvbd") pod "80cde8b2-5981-4e87-a781-db78ead1e0e1" (UID: "80cde8b2-5981-4e87-a781-db78ead1e0e1"). InnerVolumeSpecName "kube-api-access-kpvbd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 06:59:38.283256 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80cde8b2-5981-4e87-a781-db78ead1e0e1-config-data" (OuterVolumeSpecName: "config-data") pod "80cde8b2-5981-4e87-a781-db78ead1e0e1" (UID: "80cde8b2-5981-4e87-a781-db78ead1e0e1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 06:59:38.294067 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80cde8b2-5981-4e87-a781-db78ead1e0e1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "80cde8b2-5981-4e87-a781-db78ead1e0e1" (UID: "80cde8b2-5981-4e87-a781-db78ead1e0e1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 06:59:38.329749 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 06:59:38.358052 4632 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/80cde8b2-5981-4e87-a781-db78ead1e0e1-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 06:59:38.358091 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kpvbd\" (UniqueName: \"kubernetes.io/projected/80cde8b2-5981-4e87-a781-db78ead1e0e1-kube-api-access-kpvbd\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 06:59:38.358105 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80cde8b2-5981-4e87-a781-db78ead1e0e1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 06:59:38.358117 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/80cde8b2-5981-4e87-a781-db78ead1e0e1-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 06:59:38.727028 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 01 06:59:38 crc kubenswrapper[4632]: W1201 06:59:38.728491 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda84cfe8_1321_40a1_a05b_14194e1e7d48.slice/crio-791ff9fcc69109611e75779701353c31afb51e98d0a0762d52766b97a2bc2011 WatchSource:0}: Error finding container 791ff9fcc69109611e75779701353c31afb51e98d0a0762d52766b97a2bc2011: Status 404 returned error can't find the container with id 791ff9fcc69109611e75779701353c31afb51e98d0a0762d52766b97a2bc2011 Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 06:59:38.762409 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bbc60b8c-82bf-4773-95e1-e259a9aba512" path="/var/lib/kubelet/pods/bbc60b8c-82bf-4773-95e1-e259a9aba512/volumes" Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 06:59:38.860579 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-hdlnh" event={"ID":"80cde8b2-5981-4e87-a781-db78ead1e0e1","Type":"ContainerDied","Data":"63523c81161e0c83e6dbc3ae8472c71689730e79664264e5a5803901f2dfbef3"} Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 06:59:38.860655 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="63523c81161e0c83e6dbc3ae8472c71689730e79664264e5a5803901f2dfbef3" Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 06:59:38.860800 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-hdlnh" Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 06:59:38.865231 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"da84cfe8-1321-40a1-a05b-14194e1e7d48","Type":"ContainerStarted","Data":"791ff9fcc69109611e75779701353c31afb51e98d0a0762d52766b97a2bc2011"} Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 06:59:38.868453 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a0c8a4c5-5ccf-4167-89ba-9c876d889f84","Type":"ContainerStarted","Data":"048b35d1708128b2dfad3203a694526e3ac483af2aa70d2290652eeee28c4a45"} Dec 01 06:59:38 crc kubenswrapper[4632]: I1201 06:59:38.890190 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.890169807 podStartE2EDuration="2.890169807s" podCreationTimestamp="2025-12-01 06:59:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:59:38.886416316 +0000 UTC m=+988.451429289" watchObservedRunningTime="2025-12-01 06:59:38.890169807 +0000 UTC m=+988.455182780" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.028421 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.028680 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="7e6b455e-f763-4e20-8bc5-a5a3e7357970" containerName="nova-scheduler-scheduler" containerID="cri-o://92b389fad9f5bdd442e682098490461d838b5bf0e859af3e556e2f559554f446" gracePeriod=30 Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.035573 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.035895 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="152bc7eb-abfb-41dc-9f15-afcae2e96770" containerName="nova-api-log" containerID="cri-o://9e7fd3630b04975a3b0271c2caadac1b6d4def926c8bd0295757f96c72289969" gracePeriod=30 Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.036074 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="152bc7eb-abfb-41dc-9f15-afcae2e96770" containerName="nova-api-api" containerID="cri-o://21e8fea19703371500e93b935b1b5eea8f36b68b97ef22d7c67eba861404f788" gracePeriod=30 Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.045005 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.573284 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.688126 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/152bc7eb-abfb-41dc-9f15-afcae2e96770-config-data\") pod \"152bc7eb-abfb-41dc-9f15-afcae2e96770\" (UID: \"152bc7eb-abfb-41dc-9f15-afcae2e96770\") " Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.688218 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9t75m\" (UniqueName: \"kubernetes.io/projected/152bc7eb-abfb-41dc-9f15-afcae2e96770-kube-api-access-9t75m\") pod \"152bc7eb-abfb-41dc-9f15-afcae2e96770\" (UID: \"152bc7eb-abfb-41dc-9f15-afcae2e96770\") " Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.688245 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/152bc7eb-abfb-41dc-9f15-afcae2e96770-logs\") pod \"152bc7eb-abfb-41dc-9f15-afcae2e96770\" (UID: \"152bc7eb-abfb-41dc-9f15-afcae2e96770\") " Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.688297 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/152bc7eb-abfb-41dc-9f15-afcae2e96770-combined-ca-bundle\") pod \"152bc7eb-abfb-41dc-9f15-afcae2e96770\" (UID: \"152bc7eb-abfb-41dc-9f15-afcae2e96770\") " Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.688756 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/152bc7eb-abfb-41dc-9f15-afcae2e96770-logs" (OuterVolumeSpecName: "logs") pod "152bc7eb-abfb-41dc-9f15-afcae2e96770" (UID: "152bc7eb-abfb-41dc-9f15-afcae2e96770"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.689034 4632 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/152bc7eb-abfb-41dc-9f15-afcae2e96770-logs\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.696676 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/152bc7eb-abfb-41dc-9f15-afcae2e96770-kube-api-access-9t75m" (OuterVolumeSpecName: "kube-api-access-9t75m") pod "152bc7eb-abfb-41dc-9f15-afcae2e96770" (UID: "152bc7eb-abfb-41dc-9f15-afcae2e96770"). InnerVolumeSpecName "kube-api-access-9t75m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.721029 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/152bc7eb-abfb-41dc-9f15-afcae2e96770-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "152bc7eb-abfb-41dc-9f15-afcae2e96770" (UID: "152bc7eb-abfb-41dc-9f15-afcae2e96770"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.721823 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/152bc7eb-abfb-41dc-9f15-afcae2e96770-config-data" (OuterVolumeSpecName: "config-data") pod "152bc7eb-abfb-41dc-9f15-afcae2e96770" (UID: "152bc7eb-abfb-41dc-9f15-afcae2e96770"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.791305 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/152bc7eb-abfb-41dc-9f15-afcae2e96770-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.792425 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9t75m\" (UniqueName: \"kubernetes.io/projected/152bc7eb-abfb-41dc-9f15-afcae2e96770-kube-api-access-9t75m\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.792463 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/152bc7eb-abfb-41dc-9f15-afcae2e96770-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.879503 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"da84cfe8-1321-40a1-a05b-14194e1e7d48","Type":"ContainerStarted","Data":"1af3f55bcb5d536a85e95cc36b71635c70eaf033559b76ef3675566a819c0759"} Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.880954 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.882498 4632 generic.go:334] "Generic (PLEG): container finished" podID="152bc7eb-abfb-41dc-9f15-afcae2e96770" containerID="21e8fea19703371500e93b935b1b5eea8f36b68b97ef22d7c67eba861404f788" exitCode=0 Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.882519 4632 generic.go:334] "Generic (PLEG): container finished" podID="152bc7eb-abfb-41dc-9f15-afcae2e96770" containerID="9e7fd3630b04975a3b0271c2caadac1b6d4def926c8bd0295757f96c72289969" exitCode=143 Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.883065 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.890834 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"152bc7eb-abfb-41dc-9f15-afcae2e96770","Type":"ContainerDied","Data":"21e8fea19703371500e93b935b1b5eea8f36b68b97ef22d7c67eba861404f788"} Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.890868 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"152bc7eb-abfb-41dc-9f15-afcae2e96770","Type":"ContainerDied","Data":"9e7fd3630b04975a3b0271c2caadac1b6d4def926c8bd0295757f96c72289969"} Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.890908 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"152bc7eb-abfb-41dc-9f15-afcae2e96770","Type":"ContainerDied","Data":"bc4b705b7f681999100e4032e63aff51d56dfdc2a7872b97b546d606944a11c4"} Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.890928 4632 scope.go:117] "RemoveContainer" containerID="21e8fea19703371500e93b935b1b5eea8f36b68b97ef22d7c67eba861404f788" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.906555 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.906534112 podStartE2EDuration="2.906534112s" podCreationTimestamp="2025-12-01 06:59:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:59:39.905583488 +0000 UTC m=+989.470596461" watchObservedRunningTime="2025-12-01 06:59:39.906534112 +0000 UTC m=+989.471547085" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.919392 4632 scope.go:117] "RemoveContainer" containerID="9e7fd3630b04975a3b0271c2caadac1b6d4def926c8bd0295757f96c72289969" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.920998 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.926692 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.943152 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 01 06:59:39 crc kubenswrapper[4632]: E1201 06:59:39.943660 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80cde8b2-5981-4e87-a781-db78ead1e0e1" containerName="nova-manage" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.943686 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="80cde8b2-5981-4e87-a781-db78ead1e0e1" containerName="nova-manage" Dec 01 06:59:39 crc kubenswrapper[4632]: E1201 06:59:39.943701 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="152bc7eb-abfb-41dc-9f15-afcae2e96770" containerName="nova-api-log" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.943707 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="152bc7eb-abfb-41dc-9f15-afcae2e96770" containerName="nova-api-log" Dec 01 06:59:39 crc kubenswrapper[4632]: E1201 06:59:39.943716 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="152bc7eb-abfb-41dc-9f15-afcae2e96770" containerName="nova-api-api" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.943722 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="152bc7eb-abfb-41dc-9f15-afcae2e96770" containerName="nova-api-api" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.943963 4632 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="152bc7eb-abfb-41dc-9f15-afcae2e96770" containerName="nova-api-log" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.943985 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="80cde8b2-5981-4e87-a781-db78ead1e0e1" containerName="nova-manage" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.943999 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="152bc7eb-abfb-41dc-9f15-afcae2e96770" containerName="nova-api-api" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.945929 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.952049 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.975955 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.985698 4632 scope.go:117] "RemoveContainer" containerID="21e8fea19703371500e93b935b1b5eea8f36b68b97ef22d7c67eba861404f788" Dec 01 06:59:39 crc kubenswrapper[4632]: E1201 06:59:39.986219 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21e8fea19703371500e93b935b1b5eea8f36b68b97ef22d7c67eba861404f788\": container with ID starting with 21e8fea19703371500e93b935b1b5eea8f36b68b97ef22d7c67eba861404f788 not found: ID does not exist" containerID="21e8fea19703371500e93b935b1b5eea8f36b68b97ef22d7c67eba861404f788" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.986264 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21e8fea19703371500e93b935b1b5eea8f36b68b97ef22d7c67eba861404f788"} err="failed to get container status \"21e8fea19703371500e93b935b1b5eea8f36b68b97ef22d7c67eba861404f788\": rpc error: code = NotFound desc = could not find container \"21e8fea19703371500e93b935b1b5eea8f36b68b97ef22d7c67eba861404f788\": container with ID starting with 21e8fea19703371500e93b935b1b5eea8f36b68b97ef22d7c67eba861404f788 not found: ID does not exist" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.986293 4632 scope.go:117] "RemoveContainer" containerID="9e7fd3630b04975a3b0271c2caadac1b6d4def926c8bd0295757f96c72289969" Dec 01 06:59:39 crc kubenswrapper[4632]: E1201 06:59:39.986736 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e7fd3630b04975a3b0271c2caadac1b6d4def926c8bd0295757f96c72289969\": container with ID starting with 9e7fd3630b04975a3b0271c2caadac1b6d4def926c8bd0295757f96c72289969 not found: ID does not exist" containerID="9e7fd3630b04975a3b0271c2caadac1b6d4def926c8bd0295757f96c72289969" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.986786 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e7fd3630b04975a3b0271c2caadac1b6d4def926c8bd0295757f96c72289969"} err="failed to get container status \"9e7fd3630b04975a3b0271c2caadac1b6d4def926c8bd0295757f96c72289969\": rpc error: code = NotFound desc = could not find container \"9e7fd3630b04975a3b0271c2caadac1b6d4def926c8bd0295757f96c72289969\": container with ID starting with 9e7fd3630b04975a3b0271c2caadac1b6d4def926c8bd0295757f96c72289969 not found: ID does not exist" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.986845 4632 scope.go:117] 
"RemoveContainer" containerID="21e8fea19703371500e93b935b1b5eea8f36b68b97ef22d7c67eba861404f788" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.988662 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21e8fea19703371500e93b935b1b5eea8f36b68b97ef22d7c67eba861404f788"} err="failed to get container status \"21e8fea19703371500e93b935b1b5eea8f36b68b97ef22d7c67eba861404f788\": rpc error: code = NotFound desc = could not find container \"21e8fea19703371500e93b935b1b5eea8f36b68b97ef22d7c67eba861404f788\": container with ID starting with 21e8fea19703371500e93b935b1b5eea8f36b68b97ef22d7c67eba861404f788 not found: ID does not exist" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.992451 4632 scope.go:117] "RemoveContainer" containerID="9e7fd3630b04975a3b0271c2caadac1b6d4def926c8bd0295757f96c72289969" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.993314 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e7fd3630b04975a3b0271c2caadac1b6d4def926c8bd0295757f96c72289969"} err="failed to get container status \"9e7fd3630b04975a3b0271c2caadac1b6d4def926c8bd0295757f96c72289969\": rpc error: code = NotFound desc = could not find container \"9e7fd3630b04975a3b0271c2caadac1b6d4def926c8bd0295757f96c72289969\": container with ID starting with 9e7fd3630b04975a3b0271c2caadac1b6d4def926c8bd0295757f96c72289969 not found: ID does not exist" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.999140 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7abdf36-162f-45df-81dc-c692d32cf159-logs\") pod \"nova-api-0\" (UID: \"f7abdf36-162f-45df-81dc-c692d32cf159\") " pod="openstack/nova-api-0" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.999551 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7abdf36-162f-45df-81dc-c692d32cf159-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f7abdf36-162f-45df-81dc-c692d32cf159\") " pod="openstack/nova-api-0" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.999689 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7abdf36-162f-45df-81dc-c692d32cf159-config-data\") pod \"nova-api-0\" (UID: \"f7abdf36-162f-45df-81dc-c692d32cf159\") " pod="openstack/nova-api-0" Dec 01 06:59:39 crc kubenswrapper[4632]: I1201 06:59:39.999729 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rf25\" (UniqueName: \"kubernetes.io/projected/f7abdf36-162f-45df-81dc-c692d32cf159-kube-api-access-9rf25\") pod \"nova-api-0\" (UID: \"f7abdf36-162f-45df-81dc-c692d32cf159\") " pod="openstack/nova-api-0" Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.090691 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.102328 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7abdf36-162f-45df-81dc-c692d32cf159-config-data\") pod \"nova-api-0\" (UID: \"f7abdf36-162f-45df-81dc-c692d32cf159\") " pod="openstack/nova-api-0" Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.102507 4632 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rf25\" (UniqueName: \"kubernetes.io/projected/f7abdf36-162f-45df-81dc-c692d32cf159-kube-api-access-9rf25\") pod \"nova-api-0\" (UID: \"f7abdf36-162f-45df-81dc-c692d32cf159\") " pod="openstack/nova-api-0" Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.102761 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7abdf36-162f-45df-81dc-c692d32cf159-logs\") pod \"nova-api-0\" (UID: \"f7abdf36-162f-45df-81dc-c692d32cf159\") " pod="openstack/nova-api-0" Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.103090 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7abdf36-162f-45df-81dc-c692d32cf159-logs\") pod \"nova-api-0\" (UID: \"f7abdf36-162f-45df-81dc-c692d32cf159\") " pod="openstack/nova-api-0" Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.105121 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7abdf36-162f-45df-81dc-c692d32cf159-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f7abdf36-162f-45df-81dc-c692d32cf159\") " pod="openstack/nova-api-0" Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.111198 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7abdf36-162f-45df-81dc-c692d32cf159-config-data\") pod \"nova-api-0\" (UID: \"f7abdf36-162f-45df-81dc-c692d32cf159\") " pod="openstack/nova-api-0" Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.121822 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7abdf36-162f-45df-81dc-c692d32cf159-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f7abdf36-162f-45df-81dc-c692d32cf159\") " pod="openstack/nova-api-0" Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.124948 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rf25\" (UniqueName: \"kubernetes.io/projected/f7abdf36-162f-45df-81dc-c692d32cf159-kube-api-access-9rf25\") pod \"nova-api-0\" (UID: \"f7abdf36-162f-45df-81dc-c692d32cf159\") " pod="openstack/nova-api-0" Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.148260 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68c76996b9-8gcj7"] Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.150383 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" podUID="e967624e-2612-4b89-ae6c-44b3b914b8ad" containerName="dnsmasq-dns" containerID="cri-o://c61d3f68f14062f7fa348ba55512c49e7689abfca2940d06ba9794902c35ecbd" gracePeriod=10 Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.296193 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.533262 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.616434 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7m6gb\" (UniqueName: \"kubernetes.io/projected/e967624e-2612-4b89-ae6c-44b3b914b8ad-kube-api-access-7m6gb\") pod \"e967624e-2612-4b89-ae6c-44b3b914b8ad\" (UID: \"e967624e-2612-4b89-ae6c-44b3b914b8ad\") " Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.616548 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-ovsdbserver-nb\") pod \"e967624e-2612-4b89-ae6c-44b3b914b8ad\" (UID: \"e967624e-2612-4b89-ae6c-44b3b914b8ad\") " Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.616599 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-dns-swift-storage-0\") pod \"e967624e-2612-4b89-ae6c-44b3b914b8ad\" (UID: \"e967624e-2612-4b89-ae6c-44b3b914b8ad\") " Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.616684 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-ovsdbserver-sb\") pod \"e967624e-2612-4b89-ae6c-44b3b914b8ad\" (UID: \"e967624e-2612-4b89-ae6c-44b3b914b8ad\") " Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.616746 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-dns-svc\") pod \"e967624e-2612-4b89-ae6c-44b3b914b8ad\" (UID: \"e967624e-2612-4b89-ae6c-44b3b914b8ad\") " Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.616808 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-config\") pod \"e967624e-2612-4b89-ae6c-44b3b914b8ad\" (UID: \"e967624e-2612-4b89-ae6c-44b3b914b8ad\") " Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.624537 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e967624e-2612-4b89-ae6c-44b3b914b8ad-kube-api-access-7m6gb" (OuterVolumeSpecName: "kube-api-access-7m6gb") pod "e967624e-2612-4b89-ae6c-44b3b914b8ad" (UID: "e967624e-2612-4b89-ae6c-44b3b914b8ad"). InnerVolumeSpecName "kube-api-access-7m6gb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.656802 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e967624e-2612-4b89-ae6c-44b3b914b8ad" (UID: "e967624e-2612-4b89-ae6c-44b3b914b8ad"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.659203 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e967624e-2612-4b89-ae6c-44b3b914b8ad" (UID: "e967624e-2612-4b89-ae6c-44b3b914b8ad"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.659847 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-config" (OuterVolumeSpecName: "config") pod "e967624e-2612-4b89-ae6c-44b3b914b8ad" (UID: "e967624e-2612-4b89-ae6c-44b3b914b8ad"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.660628 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "e967624e-2612-4b89-ae6c-44b3b914b8ad" (UID: "e967624e-2612-4b89-ae6c-44b3b914b8ad"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.667940 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e967624e-2612-4b89-ae6c-44b3b914b8ad" (UID: "e967624e-2612-4b89-ae6c-44b3b914b8ad"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.718084 4632 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.718107 4632 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.718117 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-config\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.718128 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7m6gb\" (UniqueName: \"kubernetes.io/projected/e967624e-2612-4b89-ae6c-44b3b914b8ad-kube-api-access-7m6gb\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.718138 4632 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.718147 4632 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e967624e-2612-4b89-ae6c-44b3b914b8ad-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.771487 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="152bc7eb-abfb-41dc-9f15-afcae2e96770" path="/var/lib/kubelet/pods/152bc7eb-abfb-41dc-9f15-afcae2e96770/volumes" Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.772143 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.899090 4632 generic.go:334] "Generic (PLEG): container finished" podID="7e6b455e-f763-4e20-8bc5-a5a3e7357970" 
containerID="92b389fad9f5bdd442e682098490461d838b5bf0e859af3e556e2f559554f446" exitCode=0 Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.899195 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7e6b455e-f763-4e20-8bc5-a5a3e7357970","Type":"ContainerDied","Data":"92b389fad9f5bdd442e682098490461d838b5bf0e859af3e556e2f559554f446"} Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.904410 4632 generic.go:334] "Generic (PLEG): container finished" podID="e967624e-2612-4b89-ae6c-44b3b914b8ad" containerID="c61d3f68f14062f7fa348ba55512c49e7689abfca2940d06ba9794902c35ecbd" exitCode=0 Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.904461 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.904461 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" event={"ID":"e967624e-2612-4b89-ae6c-44b3b914b8ad","Type":"ContainerDied","Data":"c61d3f68f14062f7fa348ba55512c49e7689abfca2940d06ba9794902c35ecbd"} Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.904546 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68c76996b9-8gcj7" event={"ID":"e967624e-2612-4b89-ae6c-44b3b914b8ad","Type":"ContainerDied","Data":"788af742cd5d2cab8011bb4b041935580276587cc0c5cc4338a684b5105cc6ff"} Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.904566 4632 scope.go:117] "RemoveContainer" containerID="c61d3f68f14062f7fa348ba55512c49e7689abfca2940d06ba9794902c35ecbd" Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.907051 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f7abdf36-162f-45df-81dc-c692d32cf159","Type":"ContainerStarted","Data":"93b104b7c2b7d8f02325c355387596c43d1ca001ec652d15637d0acea1ba301e"} Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.910384 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="a0c8a4c5-5ccf-4167-89ba-9c876d889f84" containerName="nova-metadata-log" containerID="cri-o://186055e6389eabbd01ef7e07282a398908127d9e46f401887934a4f760a065d8" gracePeriod=30 Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.910523 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="a0c8a4c5-5ccf-4167-89ba-9c876d889f84" containerName="nova-metadata-metadata" containerID="cri-o://048b35d1708128b2dfad3203a694526e3ac483af2aa70d2290652eeee28c4a45" gracePeriod=30 Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.935557 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68c76996b9-8gcj7"] Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.958852 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-68c76996b9-8gcj7"] Dec 01 06:59:40 crc kubenswrapper[4632]: I1201 06:59:40.973181 4632 scope.go:117] "RemoveContainer" containerID="2f34dfbadba39982327ab7a04ec4e447d819b45970de0d0813390375ff21dc59" Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.061000 4632 scope.go:117] "RemoveContainer" containerID="c61d3f68f14062f7fa348ba55512c49e7689abfca2940d06ba9794902c35ecbd" Dec 01 06:59:41 crc kubenswrapper[4632]: E1201 06:59:41.061780 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"c61d3f68f14062f7fa348ba55512c49e7689abfca2940d06ba9794902c35ecbd\": container with ID starting with c61d3f68f14062f7fa348ba55512c49e7689abfca2940d06ba9794902c35ecbd not found: ID does not exist" containerID="c61d3f68f14062f7fa348ba55512c49e7689abfca2940d06ba9794902c35ecbd" Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.061858 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c61d3f68f14062f7fa348ba55512c49e7689abfca2940d06ba9794902c35ecbd"} err="failed to get container status \"c61d3f68f14062f7fa348ba55512c49e7689abfca2940d06ba9794902c35ecbd\": rpc error: code = NotFound desc = could not find container \"c61d3f68f14062f7fa348ba55512c49e7689abfca2940d06ba9794902c35ecbd\": container with ID starting with c61d3f68f14062f7fa348ba55512c49e7689abfca2940d06ba9794902c35ecbd not found: ID does not exist" Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.061906 4632 scope.go:117] "RemoveContainer" containerID="2f34dfbadba39982327ab7a04ec4e447d819b45970de0d0813390375ff21dc59" Dec 01 06:59:41 crc kubenswrapper[4632]: E1201 06:59:41.062512 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f34dfbadba39982327ab7a04ec4e447d819b45970de0d0813390375ff21dc59\": container with ID starting with 2f34dfbadba39982327ab7a04ec4e447d819b45970de0d0813390375ff21dc59 not found: ID does not exist" containerID="2f34dfbadba39982327ab7a04ec4e447d819b45970de0d0813390375ff21dc59" Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.062570 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f34dfbadba39982327ab7a04ec4e447d819b45970de0d0813390375ff21dc59"} err="failed to get container status \"2f34dfbadba39982327ab7a04ec4e447d819b45970de0d0813390375ff21dc59\": rpc error: code = NotFound desc = could not find container \"2f34dfbadba39982327ab7a04ec4e447d819b45970de0d0813390375ff21dc59\": container with ID starting with 2f34dfbadba39982327ab7a04ec4e447d819b45970de0d0813390375ff21dc59 not found: ID does not exist" Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.224562 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.244694 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vgv7p\" (UniqueName: \"kubernetes.io/projected/7e6b455e-f763-4e20-8bc5-a5a3e7357970-kube-api-access-vgv7p\") pod \"7e6b455e-f763-4e20-8bc5-a5a3e7357970\" (UID: \"7e6b455e-f763-4e20-8bc5-a5a3e7357970\") " Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.244819 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e6b455e-f763-4e20-8bc5-a5a3e7357970-config-data\") pod \"7e6b455e-f763-4e20-8bc5-a5a3e7357970\" (UID: \"7e6b455e-f763-4e20-8bc5-a5a3e7357970\") " Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.244968 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e6b455e-f763-4e20-8bc5-a5a3e7357970-combined-ca-bundle\") pod \"7e6b455e-f763-4e20-8bc5-a5a3e7357970\" (UID: \"7e6b455e-f763-4e20-8bc5-a5a3e7357970\") " Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.254447 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e6b455e-f763-4e20-8bc5-a5a3e7357970-kube-api-access-vgv7p" (OuterVolumeSpecName: "kube-api-access-vgv7p") pod "7e6b455e-f763-4e20-8bc5-a5a3e7357970" (UID: "7e6b455e-f763-4e20-8bc5-a5a3e7357970"). InnerVolumeSpecName "kube-api-access-vgv7p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.270974 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e6b455e-f763-4e20-8bc5-a5a3e7357970-config-data" (OuterVolumeSpecName: "config-data") pod "7e6b455e-f763-4e20-8bc5-a5a3e7357970" (UID: "7e6b455e-f763-4e20-8bc5-a5a3e7357970"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.273976 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e6b455e-f763-4e20-8bc5-a5a3e7357970-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7e6b455e-f763-4e20-8bc5-a5a3e7357970" (UID: "7e6b455e-f763-4e20-8bc5-a5a3e7357970"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.352455 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e6b455e-f763-4e20-8bc5-a5a3e7357970-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.352578 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vgv7p\" (UniqueName: \"kubernetes.io/projected/7e6b455e-f763-4e20-8bc5-a5a3e7357970-kube-api-access-vgv7p\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.352646 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e6b455e-f763-4e20-8bc5-a5a3e7357970-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.415698 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.455408 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-nova-metadata-tls-certs\") pod \"a0c8a4c5-5ccf-4167-89ba-9c876d889f84\" (UID: \"a0c8a4c5-5ccf-4167-89ba-9c876d889f84\") " Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.455574 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-config-data\") pod \"a0c8a4c5-5ccf-4167-89ba-9c876d889f84\" (UID: \"a0c8a4c5-5ccf-4167-89ba-9c876d889f84\") " Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.455700 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99ldv\" (UniqueName: \"kubernetes.io/projected/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-kube-api-access-99ldv\") pod \"a0c8a4c5-5ccf-4167-89ba-9c876d889f84\" (UID: \"a0c8a4c5-5ccf-4167-89ba-9c876d889f84\") " Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.455764 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-logs\") pod \"a0c8a4c5-5ccf-4167-89ba-9c876d889f84\" (UID: \"a0c8a4c5-5ccf-4167-89ba-9c876d889f84\") " Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.455885 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-combined-ca-bundle\") pod \"a0c8a4c5-5ccf-4167-89ba-9c876d889f84\" (UID: \"a0c8a4c5-5ccf-4167-89ba-9c876d889f84\") " Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.456161 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-logs" (OuterVolumeSpecName: "logs") pod "a0c8a4c5-5ccf-4167-89ba-9c876d889f84" (UID: "a0c8a4c5-5ccf-4167-89ba-9c876d889f84"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.456709 4632 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-logs\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.459397 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-kube-api-access-99ldv" (OuterVolumeSpecName: "kube-api-access-99ldv") pod "a0c8a4c5-5ccf-4167-89ba-9c876d889f84" (UID: "a0c8a4c5-5ccf-4167-89ba-9c876d889f84"). InnerVolumeSpecName "kube-api-access-99ldv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.477010 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-config-data" (OuterVolumeSpecName: "config-data") pod "a0c8a4c5-5ccf-4167-89ba-9c876d889f84" (UID: "a0c8a4c5-5ccf-4167-89ba-9c876d889f84"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.478932 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a0c8a4c5-5ccf-4167-89ba-9c876d889f84" (UID: "a0c8a4c5-5ccf-4167-89ba-9c876d889f84"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.498626 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "a0c8a4c5-5ccf-4167-89ba-9c876d889f84" (UID: "a0c8a4c5-5ccf-4167-89ba-9c876d889f84"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.557299 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99ldv\" (UniqueName: \"kubernetes.io/projected/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-kube-api-access-99ldv\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.557327 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.557336 4632 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.557365 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0c8a4c5-5ccf-4167-89ba-9c876d889f84-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.929345 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f7abdf36-162f-45df-81dc-c692d32cf159","Type":"ContainerStarted","Data":"decdfdfa3c4de416d941c87afa869df560c413e79a7300c58c8d3a435d6f262c"} Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.929441 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f7abdf36-162f-45df-81dc-c692d32cf159","Type":"ContainerStarted","Data":"05c6f3e711a3644ed9d081796f79e21cdab3ed1aa5afb6fde50f6c47a5e0b49c"} Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.933443 4632 generic.go:334] "Generic (PLEG): container finished" podID="a0c8a4c5-5ccf-4167-89ba-9c876d889f84" containerID="048b35d1708128b2dfad3203a694526e3ac483af2aa70d2290652eeee28c4a45" exitCode=0 Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.933492 4632 generic.go:334] "Generic (PLEG): container finished" podID="a0c8a4c5-5ccf-4167-89ba-9c876d889f84" containerID="186055e6389eabbd01ef7e07282a398908127d9e46f401887934a4f760a065d8" exitCode=143 Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.933635 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.933979 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a0c8a4c5-5ccf-4167-89ba-9c876d889f84","Type":"ContainerDied","Data":"048b35d1708128b2dfad3203a694526e3ac483af2aa70d2290652eeee28c4a45"} Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.934013 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a0c8a4c5-5ccf-4167-89ba-9c876d889f84","Type":"ContainerDied","Data":"186055e6389eabbd01ef7e07282a398908127d9e46f401887934a4f760a065d8"} Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.934026 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a0c8a4c5-5ccf-4167-89ba-9c876d889f84","Type":"ContainerDied","Data":"5d454ba9fa76bf1e0bee3df93377fac7392c0f75c4ddfd972b62e6757ef25067"} Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.934087 4632 scope.go:117] "RemoveContainer" containerID="048b35d1708128b2dfad3203a694526e3ac483af2aa70d2290652eeee28c4a45" Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.946215 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7e6b455e-f763-4e20-8bc5-a5a3e7357970","Type":"ContainerDied","Data":"fe4fc9ea316fcdaf1d410f6d115376fd90758651b41790f842b0ae870888ae62"} Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.946560 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.963079 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.963062146 podStartE2EDuration="2.963062146s" podCreationTimestamp="2025-12-01 06:59:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:59:41.950875428 +0000 UTC m=+991.515888401" watchObservedRunningTime="2025-12-01 06:59:41.963062146 +0000 UTC m=+991.528075120" Dec 01 06:59:41 crc kubenswrapper[4632]: I1201 06:59:41.997400 4632 scope.go:117] "RemoveContainer" containerID="186055e6389eabbd01ef7e07282a398908127d9e46f401887934a4f760a065d8" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.024991 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.041436 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.050753 4632 scope.go:117] "RemoveContainer" containerID="048b35d1708128b2dfad3203a694526e3ac483af2aa70d2290652eeee28c4a45" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.055908 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 06:59:42 crc kubenswrapper[4632]: E1201 06:59:42.059498 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"048b35d1708128b2dfad3203a694526e3ac483af2aa70d2290652eeee28c4a45\": container with ID starting with 048b35d1708128b2dfad3203a694526e3ac483af2aa70d2290652eeee28c4a45 not found: ID does not exist" containerID="048b35d1708128b2dfad3203a694526e3ac483af2aa70d2290652eeee28c4a45" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.059542 4632 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"048b35d1708128b2dfad3203a694526e3ac483af2aa70d2290652eeee28c4a45"} err="failed to get container status \"048b35d1708128b2dfad3203a694526e3ac483af2aa70d2290652eeee28c4a45\": rpc error: code = NotFound desc = could not find container \"048b35d1708128b2dfad3203a694526e3ac483af2aa70d2290652eeee28c4a45\": container with ID starting with 048b35d1708128b2dfad3203a694526e3ac483af2aa70d2290652eeee28c4a45 not found: ID does not exist" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.059590 4632 scope.go:117] "RemoveContainer" containerID="186055e6389eabbd01ef7e07282a398908127d9e46f401887934a4f760a065d8" Dec 01 06:59:42 crc kubenswrapper[4632]: E1201 06:59:42.074763 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"186055e6389eabbd01ef7e07282a398908127d9e46f401887934a4f760a065d8\": container with ID starting with 186055e6389eabbd01ef7e07282a398908127d9e46f401887934a4f760a065d8 not found: ID does not exist" containerID="186055e6389eabbd01ef7e07282a398908127d9e46f401887934a4f760a065d8" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.074899 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"186055e6389eabbd01ef7e07282a398908127d9e46f401887934a4f760a065d8"} err="failed to get container status \"186055e6389eabbd01ef7e07282a398908127d9e46f401887934a4f760a065d8\": rpc error: code = NotFound desc = could not find container \"186055e6389eabbd01ef7e07282a398908127d9e46f401887934a4f760a065d8\": container with ID starting with 186055e6389eabbd01ef7e07282a398908127d9e46f401887934a4f760a065d8 not found: ID does not exist" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.074964 4632 scope.go:117] "RemoveContainer" containerID="048b35d1708128b2dfad3203a694526e3ac483af2aa70d2290652eeee28c4a45" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.081906 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"048b35d1708128b2dfad3203a694526e3ac483af2aa70d2290652eeee28c4a45"} err="failed to get container status \"048b35d1708128b2dfad3203a694526e3ac483af2aa70d2290652eeee28c4a45\": rpc error: code = NotFound desc = could not find container \"048b35d1708128b2dfad3203a694526e3ac483af2aa70d2290652eeee28c4a45\": container with ID starting with 048b35d1708128b2dfad3203a694526e3ac483af2aa70d2290652eeee28c4a45 not found: ID does not exist" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.082017 4632 scope.go:117] "RemoveContainer" containerID="186055e6389eabbd01ef7e07282a398908127d9e46f401887934a4f760a065d8" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.085674 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"186055e6389eabbd01ef7e07282a398908127d9e46f401887934a4f760a065d8"} err="failed to get container status \"186055e6389eabbd01ef7e07282a398908127d9e46f401887934a4f760a065d8\": rpc error: code = NotFound desc = could not find container \"186055e6389eabbd01ef7e07282a398908127d9e46f401887934a4f760a065d8\": container with ID starting with 186055e6389eabbd01ef7e07282a398908127d9e46f401887934a4f760a065d8 not found: ID does not exist" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.085720 4632 scope.go:117] "RemoveContainer" containerID="92b389fad9f5bdd442e682098490461d838b5bf0e859af3e556e2f559554f446" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.098102 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/nova-metadata-0"] Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.115714 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 06:59:42 crc kubenswrapper[4632]: E1201 06:59:42.116753 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0c8a4c5-5ccf-4167-89ba-9c876d889f84" containerName="nova-metadata-metadata" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.116780 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0c8a4c5-5ccf-4167-89ba-9c876d889f84" containerName="nova-metadata-metadata" Dec 01 06:59:42 crc kubenswrapper[4632]: E1201 06:59:42.116831 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0c8a4c5-5ccf-4167-89ba-9c876d889f84" containerName="nova-metadata-log" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.116838 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0c8a4c5-5ccf-4167-89ba-9c876d889f84" containerName="nova-metadata-log" Dec 01 06:59:42 crc kubenswrapper[4632]: E1201 06:59:42.116859 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e6b455e-f763-4e20-8bc5-a5a3e7357970" containerName="nova-scheduler-scheduler" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.116865 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e6b455e-f763-4e20-8bc5-a5a3e7357970" containerName="nova-scheduler-scheduler" Dec 01 06:59:42 crc kubenswrapper[4632]: E1201 06:59:42.116879 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e967624e-2612-4b89-ae6c-44b3b914b8ad" containerName="dnsmasq-dns" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.116886 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="e967624e-2612-4b89-ae6c-44b3b914b8ad" containerName="dnsmasq-dns" Dec 01 06:59:42 crc kubenswrapper[4632]: E1201 06:59:42.116899 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e967624e-2612-4b89-ae6c-44b3b914b8ad" containerName="init" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.116906 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="e967624e-2612-4b89-ae6c-44b3b914b8ad" containerName="init" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.117296 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e6b455e-f763-4e20-8bc5-a5a3e7357970" containerName="nova-scheduler-scheduler" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.117331 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="e967624e-2612-4b89-ae6c-44b3b914b8ad" containerName="dnsmasq-dns" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.117376 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0c8a4c5-5ccf-4167-89ba-9c876d889f84" containerName="nova-metadata-log" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.117394 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0c8a4c5-5ccf-4167-89ba-9c876d889f84" containerName="nova-metadata-metadata" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.118740 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.121201 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.139988 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.148945 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.150633 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.152640 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.153313 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.163695 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.293649 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d43821ac-9a83-469d-9827-4f7b9d0d42c1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d43821ac-9a83-469d-9827-4f7b9d0d42c1\") " pod="openstack/nova-scheduler-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.293738 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dsbg\" (UniqueName: \"kubernetes.io/projected/00425bd3-e9db-4a5b-a064-9c75692cf6c1-kube-api-access-4dsbg\") pod \"nova-metadata-0\" (UID: \"00425bd3-e9db-4a5b-a064-9c75692cf6c1\") " pod="openstack/nova-metadata-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.293863 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00425bd3-e9db-4a5b-a064-9c75692cf6c1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"00425bd3-e9db-4a5b-a064-9c75692cf6c1\") " pod="openstack/nova-metadata-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.293903 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/00425bd3-e9db-4a5b-a064-9c75692cf6c1-logs\") pod \"nova-metadata-0\" (UID: \"00425bd3-e9db-4a5b-a064-9c75692cf6c1\") " pod="openstack/nova-metadata-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.293928 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00425bd3-e9db-4a5b-a064-9c75692cf6c1-config-data\") pod \"nova-metadata-0\" (UID: \"00425bd3-e9db-4a5b-a064-9c75692cf6c1\") " pod="openstack/nova-metadata-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.293951 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkggz\" (UniqueName: \"kubernetes.io/projected/d43821ac-9a83-469d-9827-4f7b9d0d42c1-kube-api-access-bkggz\") pod \"nova-scheduler-0\" (UID: \"d43821ac-9a83-469d-9827-4f7b9d0d42c1\") " pod="openstack/nova-scheduler-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 
06:59:42.293986 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/00425bd3-e9db-4a5b-a064-9c75692cf6c1-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"00425bd3-e9db-4a5b-a064-9c75692cf6c1\") " pod="openstack/nova-metadata-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.294310 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d43821ac-9a83-469d-9827-4f7b9d0d42c1-config-data\") pod \"nova-scheduler-0\" (UID: \"d43821ac-9a83-469d-9827-4f7b9d0d42c1\") " pod="openstack/nova-scheduler-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.396231 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00425bd3-e9db-4a5b-a064-9c75692cf6c1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"00425bd3-e9db-4a5b-a064-9c75692cf6c1\") " pod="openstack/nova-metadata-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.396279 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/00425bd3-e9db-4a5b-a064-9c75692cf6c1-logs\") pod \"nova-metadata-0\" (UID: \"00425bd3-e9db-4a5b-a064-9c75692cf6c1\") " pod="openstack/nova-metadata-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.396304 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00425bd3-e9db-4a5b-a064-9c75692cf6c1-config-data\") pod \"nova-metadata-0\" (UID: \"00425bd3-e9db-4a5b-a064-9c75692cf6c1\") " pod="openstack/nova-metadata-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.396333 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkggz\" (UniqueName: \"kubernetes.io/projected/d43821ac-9a83-469d-9827-4f7b9d0d42c1-kube-api-access-bkggz\") pod \"nova-scheduler-0\" (UID: \"d43821ac-9a83-469d-9827-4f7b9d0d42c1\") " pod="openstack/nova-scheduler-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.396390 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/00425bd3-e9db-4a5b-a064-9c75692cf6c1-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"00425bd3-e9db-4a5b-a064-9c75692cf6c1\") " pod="openstack/nova-metadata-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.396439 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d43821ac-9a83-469d-9827-4f7b9d0d42c1-config-data\") pod \"nova-scheduler-0\" (UID: \"d43821ac-9a83-469d-9827-4f7b9d0d42c1\") " pod="openstack/nova-scheduler-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.396464 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d43821ac-9a83-469d-9827-4f7b9d0d42c1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d43821ac-9a83-469d-9827-4f7b9d0d42c1\") " pod="openstack/nova-scheduler-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.396507 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dsbg\" (UniqueName: \"kubernetes.io/projected/00425bd3-e9db-4a5b-a064-9c75692cf6c1-kube-api-access-4dsbg\") pod 
\"nova-metadata-0\" (UID: \"00425bd3-e9db-4a5b-a064-9c75692cf6c1\") " pod="openstack/nova-metadata-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.397501 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/00425bd3-e9db-4a5b-a064-9c75692cf6c1-logs\") pod \"nova-metadata-0\" (UID: \"00425bd3-e9db-4a5b-a064-9c75692cf6c1\") " pod="openstack/nova-metadata-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.401723 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d43821ac-9a83-469d-9827-4f7b9d0d42c1-config-data\") pod \"nova-scheduler-0\" (UID: \"d43821ac-9a83-469d-9827-4f7b9d0d42c1\") " pod="openstack/nova-scheduler-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.402109 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00425bd3-e9db-4a5b-a064-9c75692cf6c1-config-data\") pod \"nova-metadata-0\" (UID: \"00425bd3-e9db-4a5b-a064-9c75692cf6c1\") " pod="openstack/nova-metadata-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.402341 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/00425bd3-e9db-4a5b-a064-9c75692cf6c1-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"00425bd3-e9db-4a5b-a064-9c75692cf6c1\") " pod="openstack/nova-metadata-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.401780 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00425bd3-e9db-4a5b-a064-9c75692cf6c1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"00425bd3-e9db-4a5b-a064-9c75692cf6c1\") " pod="openstack/nova-metadata-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.403315 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d43821ac-9a83-469d-9827-4f7b9d0d42c1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d43821ac-9a83-469d-9827-4f7b9d0d42c1\") " pod="openstack/nova-scheduler-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.411970 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dsbg\" (UniqueName: \"kubernetes.io/projected/00425bd3-e9db-4a5b-a064-9c75692cf6c1-kube-api-access-4dsbg\") pod \"nova-metadata-0\" (UID: \"00425bd3-e9db-4a5b-a064-9c75692cf6c1\") " pod="openstack/nova-metadata-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.414865 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkggz\" (UniqueName: \"kubernetes.io/projected/d43821ac-9a83-469d-9827-4f7b9d0d42c1-kube-api-access-bkggz\") pod \"nova-scheduler-0\" (UID: \"d43821ac-9a83-469d-9827-4f7b9d0d42c1\") " pod="openstack/nova-scheduler-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.447097 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.467942 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.763600 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e6b455e-f763-4e20-8bc5-a5a3e7357970" path="/var/lib/kubelet/pods/7e6b455e-f763-4e20-8bc5-a5a3e7357970/volumes" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.764252 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0c8a4c5-5ccf-4167-89ba-9c876d889f84" path="/var/lib/kubelet/pods/a0c8a4c5-5ccf-4167-89ba-9c876d889f84/volumes" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.764910 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e967624e-2612-4b89-ae6c-44b3b914b8ad" path="/var/lib/kubelet/pods/e967624e-2612-4b89-ae6c-44b3b914b8ad/volumes" Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.879849 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.953250 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 01 06:59:42 crc kubenswrapper[4632]: W1201 06:59:42.953439 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00425bd3_e9db_4a5b_a064_9c75692cf6c1.slice/crio-dd18d0901f5d9df5047e0880eba834d2772354262890c0dc6cd80c064b87a40e WatchSource:0}: Error finding container dd18d0901f5d9df5047e0880eba834d2772354262890c0dc6cd80c064b87a40e: Status 404 returned error can't find the container with id dd18d0901f5d9df5047e0880eba834d2772354262890c0dc6cd80c064b87a40e Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.971855 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d43821ac-9a83-469d-9827-4f7b9d0d42c1","Type":"ContainerStarted","Data":"4ea9bf33f0c9105371c61ba53e38bb848ab8a821f98699df17dde6c0645cdd6b"} Dec 01 06:59:42 crc kubenswrapper[4632]: I1201 06:59:42.973149 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"00425bd3-e9db-4a5b-a064-9c75692cf6c1","Type":"ContainerStarted","Data":"dd18d0901f5d9df5047e0880eba834d2772354262890c0dc6cd80c064b87a40e"} Dec 01 06:59:43 crc kubenswrapper[4632]: I1201 06:59:43.353960 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 01 06:59:44 crc kubenswrapper[4632]: I1201 06:59:43.999881 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"00425bd3-e9db-4a5b-a064-9c75692cf6c1","Type":"ContainerStarted","Data":"bd3608b72fa277b4bad3bd4dd2e5f229ecd355ecc70e1182f259a63975a24221"} Dec 01 06:59:44 crc kubenswrapper[4632]: I1201 06:59:44.000218 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"00425bd3-e9db-4a5b-a064-9c75692cf6c1","Type":"ContainerStarted","Data":"43753ea9160bc87d18dd0a1eda2c3531c4388a1974d3fd7de30002df7b3cbab9"} Dec 01 06:59:44 crc kubenswrapper[4632]: I1201 06:59:44.002184 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d43821ac-9a83-469d-9827-4f7b9d0d42c1","Type":"ContainerStarted","Data":"fe8be1876445e33ea1df74144b0acf9f71db8570eaca206e0489f7047eed8b1f"} Dec 01 06:59:44 crc kubenswrapper[4632]: I1201 06:59:44.023200 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.02316804 
podStartE2EDuration="2.02316804s" podCreationTimestamp="2025-12-01 06:59:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:59:44.013160813 +0000 UTC m=+993.578173786" watchObservedRunningTime="2025-12-01 06:59:44.02316804 +0000 UTC m=+993.588181013" Dec 01 06:59:44 crc kubenswrapper[4632]: I1201 06:59:44.034479 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.034449311 podStartE2EDuration="2.034449311s" podCreationTimestamp="2025-12-01 06:59:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 06:59:44.029801884 +0000 UTC m=+993.594814856" watchObservedRunningTime="2025-12-01 06:59:44.034449311 +0000 UTC m=+993.599462284" Dec 01 06:59:47 crc kubenswrapper[4632]: I1201 06:59:47.448115 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 01 06:59:47 crc kubenswrapper[4632]: I1201 06:59:47.468292 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 01 06:59:47 crc kubenswrapper[4632]: I1201 06:59:47.468344 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 01 06:59:50 crc kubenswrapper[4632]: I1201 06:59:50.297332 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 01 06:59:50 crc kubenswrapper[4632]: I1201 06:59:50.297763 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 01 06:59:51 crc kubenswrapper[4632]: I1201 06:59:51.382208 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f7abdf36-162f-45df-81dc-c692d32cf159" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 01 06:59:51 crc kubenswrapper[4632]: I1201 06:59:51.382473 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f7abdf36-162f-45df-81dc-c692d32cf159" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.188:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 01 06:59:52 crc kubenswrapper[4632]: I1201 06:59:52.448406 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 01 06:59:52 crc kubenswrapper[4632]: I1201 06:59:52.468807 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 01 06:59:52 crc kubenswrapper[4632]: I1201 06:59:52.468862 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 01 06:59:52 crc kubenswrapper[4632]: I1201 06:59:52.471893 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 01 06:59:53 crc kubenswrapper[4632]: I1201 06:59:53.114870 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 01 06:59:53 crc kubenswrapper[4632]: I1201 06:59:53.483485 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="00425bd3-e9db-4a5b-a064-9c75692cf6c1" containerName="nova-metadata-metadata" 
probeResult="failure" output="Get \"https://10.217.0.190:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 01 06:59:53 crc kubenswrapper[4632]: I1201 06:59:53.483485 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="00425bd3-e9db-4a5b-a064-9c75692cf6c1" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.190:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 01 06:59:54 crc kubenswrapper[4632]: I1201 06:59:54.944083 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 01 06:59:58 crc kubenswrapper[4632]: I1201 06:59:58.063742 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 06:59:58 crc kubenswrapper[4632]: I1201 06:59:58.064590 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="5f0d2218-83ab-431d-a2d3-e7d54237abff" containerName="kube-state-metrics" containerID="cri-o://d08dd6514daff3c06f8352953a381beacfd2d09d94862de30c588a98b860c708" gracePeriod=30 Dec 01 06:59:58 crc kubenswrapper[4632]: I1201 06:59:58.489840 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 01 06:59:58 crc kubenswrapper[4632]: I1201 06:59:58.635232 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mhgj8\" (UniqueName: \"kubernetes.io/projected/5f0d2218-83ab-431d-a2d3-e7d54237abff-kube-api-access-mhgj8\") pod \"5f0d2218-83ab-431d-a2d3-e7d54237abff\" (UID: \"5f0d2218-83ab-431d-a2d3-e7d54237abff\") " Dec 01 06:59:58 crc kubenswrapper[4632]: I1201 06:59:58.641255 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f0d2218-83ab-431d-a2d3-e7d54237abff-kube-api-access-mhgj8" (OuterVolumeSpecName: "kube-api-access-mhgj8") pod "5f0d2218-83ab-431d-a2d3-e7d54237abff" (UID: "5f0d2218-83ab-431d-a2d3-e7d54237abff"). InnerVolumeSpecName "kube-api-access-mhgj8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 06:59:58 crc kubenswrapper[4632]: I1201 06:59:58.737988 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mhgj8\" (UniqueName: \"kubernetes.io/projected/5f0d2218-83ab-431d-a2d3-e7d54237abff-kube-api-access-mhgj8\") on node \"crc\" DevicePath \"\"" Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.149192 4632 generic.go:334] "Generic (PLEG): container finished" podID="5f0d2218-83ab-431d-a2d3-e7d54237abff" containerID="d08dd6514daff3c06f8352953a381beacfd2d09d94862de30c588a98b860c708" exitCode=2 Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.149247 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5f0d2218-83ab-431d-a2d3-e7d54237abff","Type":"ContainerDied","Data":"d08dd6514daff3c06f8352953a381beacfd2d09d94862de30c588a98b860c708"} Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.149255 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.149281 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5f0d2218-83ab-431d-a2d3-e7d54237abff","Type":"ContainerDied","Data":"80c67f9f39894396310f7f3b053174f7eb310a0c69d89051ab4b39350fa61772"} Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.149305 4632 scope.go:117] "RemoveContainer" containerID="d08dd6514daff3c06f8352953a381beacfd2d09d94862de30c588a98b860c708" Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.176897 4632 scope.go:117] "RemoveContainer" containerID="d08dd6514daff3c06f8352953a381beacfd2d09d94862de30c588a98b860c708" Dec 01 06:59:59 crc kubenswrapper[4632]: E1201 06:59:59.181967 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d08dd6514daff3c06f8352953a381beacfd2d09d94862de30c588a98b860c708\": container with ID starting with d08dd6514daff3c06f8352953a381beacfd2d09d94862de30c588a98b860c708 not found: ID does not exist" containerID="d08dd6514daff3c06f8352953a381beacfd2d09d94862de30c588a98b860c708" Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.182015 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d08dd6514daff3c06f8352953a381beacfd2d09d94862de30c588a98b860c708"} err="failed to get container status \"d08dd6514daff3c06f8352953a381beacfd2d09d94862de30c588a98b860c708\": rpc error: code = NotFound desc = could not find container \"d08dd6514daff3c06f8352953a381beacfd2d09d94862de30c588a98b860c708\": container with ID starting with d08dd6514daff3c06f8352953a381beacfd2d09d94862de30c588a98b860c708 not found: ID does not exist" Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.182085 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.212231 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.222323 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 06:59:59 crc kubenswrapper[4632]: E1201 06:59:59.222927 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f0d2218-83ab-431d-a2d3-e7d54237abff" containerName="kube-state-metrics" Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.222951 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f0d2218-83ab-431d-a2d3-e7d54237abff" containerName="kube-state-metrics" Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.223209 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f0d2218-83ab-431d-a2d3-e7d54237abff" containerName="kube-state-metrics" Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.224054 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.226762 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.226762 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.234572 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.349701 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/7ada4ec3-07bb-43f8-9d48-30d2075314d0-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"7ada4ec3-07bb-43f8-9d48-30d2075314d0\") " pod="openstack/kube-state-metrics-0" Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.349850 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5cgr\" (UniqueName: \"kubernetes.io/projected/7ada4ec3-07bb-43f8-9d48-30d2075314d0-kube-api-access-c5cgr\") pod \"kube-state-metrics-0\" (UID: \"7ada4ec3-07bb-43f8-9d48-30d2075314d0\") " pod="openstack/kube-state-metrics-0" Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.350017 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ada4ec3-07bb-43f8-9d48-30d2075314d0-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"7ada4ec3-07bb-43f8-9d48-30d2075314d0\") " pod="openstack/kube-state-metrics-0" Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.350066 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ada4ec3-07bb-43f8-9d48-30d2075314d0-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"7ada4ec3-07bb-43f8-9d48-30d2075314d0\") " pod="openstack/kube-state-metrics-0" Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.451926 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5cgr\" (UniqueName: \"kubernetes.io/projected/7ada4ec3-07bb-43f8-9d48-30d2075314d0-kube-api-access-c5cgr\") pod \"kube-state-metrics-0\" (UID: \"7ada4ec3-07bb-43f8-9d48-30d2075314d0\") " pod="openstack/kube-state-metrics-0" Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.452289 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ada4ec3-07bb-43f8-9d48-30d2075314d0-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"7ada4ec3-07bb-43f8-9d48-30d2075314d0\") " pod="openstack/kube-state-metrics-0" Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.452467 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ada4ec3-07bb-43f8-9d48-30d2075314d0-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"7ada4ec3-07bb-43f8-9d48-30d2075314d0\") " pod="openstack/kube-state-metrics-0" Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.452601 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" 
(UniqueName: \"kubernetes.io/secret/7ada4ec3-07bb-43f8-9d48-30d2075314d0-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"7ada4ec3-07bb-43f8-9d48-30d2075314d0\") " pod="openstack/kube-state-metrics-0" Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.457109 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ada4ec3-07bb-43f8-9d48-30d2075314d0-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"7ada4ec3-07bb-43f8-9d48-30d2075314d0\") " pod="openstack/kube-state-metrics-0" Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.458523 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/7ada4ec3-07bb-43f8-9d48-30d2075314d0-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"7ada4ec3-07bb-43f8-9d48-30d2075314d0\") " pod="openstack/kube-state-metrics-0" Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.458563 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ada4ec3-07bb-43f8-9d48-30d2075314d0-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"7ada4ec3-07bb-43f8-9d48-30d2075314d0\") " pod="openstack/kube-state-metrics-0" Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.465263 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5cgr\" (UniqueName: \"kubernetes.io/projected/7ada4ec3-07bb-43f8-9d48-30d2075314d0-kube-api-access-c5cgr\") pod \"kube-state-metrics-0\" (UID: \"7ada4ec3-07bb-43f8-9d48-30d2075314d0\") " pod="openstack/kube-state-metrics-0" Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.542634 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.613209 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.613530 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="985b6caa-b51e-4da7-ba3b-1dad22e138ff" containerName="ceilometer-central-agent" containerID="cri-o://6e57477dccce9032ae046d1909ea368f4ba585d0da1baa37f57b779360a9d7b2" gracePeriod=30 Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.613693 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="985b6caa-b51e-4da7-ba3b-1dad22e138ff" containerName="ceilometer-notification-agent" containerID="cri-o://d4e07e1ddd251612cef48cbac2fcced5ace9b1cb50e1fcc3a423bf7a92fa3e97" gracePeriod=30 Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.613682 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="985b6caa-b51e-4da7-ba3b-1dad22e138ff" containerName="sg-core" containerID="cri-o://a1c159bfc199a4fd080b2da3c03826f4ff70817c1106311bbbd0a2b582711d33" gracePeriod=30 Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.613768 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="985b6caa-b51e-4da7-ba3b-1dad22e138ff" containerName="proxy-httpd" containerID="cri-o://22e06cfd74d89f98114adb6a39a15b0289ce14df09888e523cfd2789cf872d9a" gracePeriod=30 Dec 01 06:59:59 crc kubenswrapper[4632]: I1201 06:59:59.968728 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 01 06:59:59 crc kubenswrapper[4632]: W1201 06:59:59.971795 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ada4ec3_07bb_43f8_9d48_30d2075314d0.slice/crio-476ca49a75d05375dbc7c34efc9321dd3a5c0e85adc682b451fe0c683d461610 WatchSource:0}: Error finding container 476ca49a75d05375dbc7c34efc9321dd3a5c0e85adc682b451fe0c683d461610: Status 404 returned error can't find the container with id 476ca49a75d05375dbc7c34efc9321dd3a5c0e85adc682b451fe0c683d461610 Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.149304 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409540-dkvp2"] Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.151421 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-dkvp2" Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.153378 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.153714 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.176071 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59b49\" (UniqueName: \"kubernetes.io/projected/bd9d9843-a0ae-4d9b-ad41-440d257e647e-kube-api-access-59b49\") pod \"collect-profiles-29409540-dkvp2\" (UID: \"bd9d9843-a0ae-4d9b-ad41-440d257e647e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-dkvp2" Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.176184 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bd9d9843-a0ae-4d9b-ad41-440d257e647e-secret-volume\") pod \"collect-profiles-29409540-dkvp2\" (UID: \"bd9d9843-a0ae-4d9b-ad41-440d257e647e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-dkvp2" Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.176345 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bd9d9843-a0ae-4d9b-ad41-440d257e647e-config-volume\") pod \"collect-profiles-29409540-dkvp2\" (UID: \"bd9d9843-a0ae-4d9b-ad41-440d257e647e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-dkvp2" Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.176822 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409540-dkvp2"] Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.177437 4632 generic.go:334] "Generic (PLEG): container finished" podID="985b6caa-b51e-4da7-ba3b-1dad22e138ff" containerID="22e06cfd74d89f98114adb6a39a15b0289ce14df09888e523cfd2789cf872d9a" exitCode=0 Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.177458 4632 generic.go:334] "Generic (PLEG): container finished" podID="985b6caa-b51e-4da7-ba3b-1dad22e138ff" containerID="a1c159bfc199a4fd080b2da3c03826f4ff70817c1106311bbbd0a2b582711d33" exitCode=2 Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.177467 4632 generic.go:334] "Generic (PLEG): container finished" podID="985b6caa-b51e-4da7-ba3b-1dad22e138ff" containerID="6e57477dccce9032ae046d1909ea368f4ba585d0da1baa37f57b779360a9d7b2" exitCode=0 Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.177517 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"985b6caa-b51e-4da7-ba3b-1dad22e138ff","Type":"ContainerDied","Data":"22e06cfd74d89f98114adb6a39a15b0289ce14df09888e523cfd2789cf872d9a"} Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.177538 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"985b6caa-b51e-4da7-ba3b-1dad22e138ff","Type":"ContainerDied","Data":"a1c159bfc199a4fd080b2da3c03826f4ff70817c1106311bbbd0a2b582711d33"} Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.177548 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"985b6caa-b51e-4da7-ba3b-1dad22e138ff","Type":"ContainerDied","Data":"6e57477dccce9032ae046d1909ea368f4ba585d0da1baa37f57b779360a9d7b2"} Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.178884 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7ada4ec3-07bb-43f8-9d48-30d2075314d0","Type":"ContainerStarted","Data":"476ca49a75d05375dbc7c34efc9321dd3a5c0e85adc682b451fe0c683d461610"} Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.279526 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bd9d9843-a0ae-4d9b-ad41-440d257e647e-config-volume\") pod \"collect-profiles-29409540-dkvp2\" (UID: \"bd9d9843-a0ae-4d9b-ad41-440d257e647e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-dkvp2" Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.279786 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59b49\" (UniqueName: \"kubernetes.io/projected/bd9d9843-a0ae-4d9b-ad41-440d257e647e-kube-api-access-59b49\") pod \"collect-profiles-29409540-dkvp2\" (UID: \"bd9d9843-a0ae-4d9b-ad41-440d257e647e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-dkvp2" Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.279894 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bd9d9843-a0ae-4d9b-ad41-440d257e647e-secret-volume\") pod \"collect-profiles-29409540-dkvp2\" (UID: \"bd9d9843-a0ae-4d9b-ad41-440d257e647e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-dkvp2" Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.280866 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bd9d9843-a0ae-4d9b-ad41-440d257e647e-config-volume\") pod \"collect-profiles-29409540-dkvp2\" (UID: \"bd9d9843-a0ae-4d9b-ad41-440d257e647e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-dkvp2" Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.285910 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bd9d9843-a0ae-4d9b-ad41-440d257e647e-secret-volume\") pod \"collect-profiles-29409540-dkvp2\" (UID: \"bd9d9843-a0ae-4d9b-ad41-440d257e647e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-dkvp2" Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.308911 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59b49\" (UniqueName: \"kubernetes.io/projected/bd9d9843-a0ae-4d9b-ad41-440d257e647e-kube-api-access-59b49\") pod \"collect-profiles-29409540-dkvp2\" (UID: \"bd9d9843-a0ae-4d9b-ad41-440d257e647e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-dkvp2" Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.365485 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.367252 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.368633 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.392858 4632 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.478097 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-dkvp2" Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.762033 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f0d2218-83ab-431d-a2d3-e7d54237abff" path="/var/lib/kubelet/pods/5f0d2218-83ab-431d-a2d3-e7d54237abff/volumes" Dec 01 07:00:00 crc kubenswrapper[4632]: I1201 07:00:00.884093 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409540-dkvp2"] Dec 01 07:00:00 crc kubenswrapper[4632]: W1201 07:00:00.891175 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbd9d9843_a0ae_4d9b_ad41_440d257e647e.slice/crio-8a422b7e8480dab5dfc6fbfc5c0b1958cd9626b936808306a5846d1d34b302df WatchSource:0}: Error finding container 8a422b7e8480dab5dfc6fbfc5c0b1958cd9626b936808306a5846d1d34b302df: Status 404 returned error can't find the container with id 8a422b7e8480dab5dfc6fbfc5c0b1958cd9626b936808306a5846d1d34b302df Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.189088 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"7ada4ec3-07bb-43f8-9d48-30d2075314d0","Type":"ContainerStarted","Data":"0360e2d16483efabaa5a1518b5dc70ebd6e9385293c84ffa61337f0478ac80bb"} Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.189271 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.191694 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-dkvp2" event={"ID":"bd9d9843-a0ae-4d9b-ad41-440d257e647e","Type":"ContainerStarted","Data":"536e94f3e2a64480ec36eaaf825218dae46995b087f5946f9288e16d59e551ce"} Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.191741 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-dkvp2" event={"ID":"bd9d9843-a0ae-4d9b-ad41-440d257e647e","Type":"ContainerStarted","Data":"8a422b7e8480dab5dfc6fbfc5c0b1958cd9626b936808306a5846d1d34b302df"} Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.191910 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.194954 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.213293 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.890859456 podStartE2EDuration="2.213272533s" podCreationTimestamp="2025-12-01 06:59:59 +0000 UTC" firstStartedPulling="2025-12-01 06:59:59.974116082 +0000 UTC m=+1009.539129045" lastFinishedPulling="2025-12-01 07:00:00.296529148 +0000 UTC m=+1009.861542122" observedRunningTime="2025-12-01 07:00:01.202738773 +0000 UTC m=+1010.767751756" watchObservedRunningTime="2025-12-01 07:00:01.213272533 +0000 UTC m=+1010.778285507" Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.244897 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-dkvp2" podStartSLOduration=1.244879064 podStartE2EDuration="1.244879064s" podCreationTimestamp="2025-12-01 07:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:00:01.241728852 +0000 UTC m=+1010.806741825" watchObservedRunningTime="2025-12-01 07:00:01.244879064 +0000 UTC m=+1010.809892027" Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.379427 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7476c47877-cskmq"] Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.381497 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7476c47877-cskmq" Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.396665 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7476c47877-cskmq"] Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.505446 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckkg8\" (UniqueName: \"kubernetes.io/projected/09d6a1d1-b814-4262-9437-f140536ce50d-kube-api-access-ckkg8\") pod \"dnsmasq-dns-7476c47877-cskmq\" (UID: \"09d6a1d1-b814-4262-9437-f140536ce50d\") " pod="openstack/dnsmasq-dns-7476c47877-cskmq" Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.505822 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-dns-swift-storage-0\") pod \"dnsmasq-dns-7476c47877-cskmq\" (UID: \"09d6a1d1-b814-4262-9437-f140536ce50d\") " pod="openstack/dnsmasq-dns-7476c47877-cskmq" Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.505919 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-ovsdbserver-sb\") pod \"dnsmasq-dns-7476c47877-cskmq\" (UID: \"09d6a1d1-b814-4262-9437-f140536ce50d\") " pod="openstack/dnsmasq-dns-7476c47877-cskmq" Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.506104 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-config\") pod \"dnsmasq-dns-7476c47877-cskmq\" (UID: \"09d6a1d1-b814-4262-9437-f140536ce50d\") " pod="openstack/dnsmasq-dns-7476c47877-cskmq" Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.506248 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-ovsdbserver-nb\") pod \"dnsmasq-dns-7476c47877-cskmq\" (UID: \"09d6a1d1-b814-4262-9437-f140536ce50d\") " pod="openstack/dnsmasq-dns-7476c47877-cskmq" Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.506312 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-dns-svc\") pod \"dnsmasq-dns-7476c47877-cskmq\" (UID: \"09d6a1d1-b814-4262-9437-f140536ce50d\") " pod="openstack/dnsmasq-dns-7476c47877-cskmq" Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.608683 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-ckkg8\" (UniqueName: \"kubernetes.io/projected/09d6a1d1-b814-4262-9437-f140536ce50d-kube-api-access-ckkg8\") pod \"dnsmasq-dns-7476c47877-cskmq\" (UID: \"09d6a1d1-b814-4262-9437-f140536ce50d\") " pod="openstack/dnsmasq-dns-7476c47877-cskmq" Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.608774 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-dns-swift-storage-0\") pod \"dnsmasq-dns-7476c47877-cskmq\" (UID: \"09d6a1d1-b814-4262-9437-f140536ce50d\") " pod="openstack/dnsmasq-dns-7476c47877-cskmq" Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.609620 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-dns-swift-storage-0\") pod \"dnsmasq-dns-7476c47877-cskmq\" (UID: \"09d6a1d1-b814-4262-9437-f140536ce50d\") " pod="openstack/dnsmasq-dns-7476c47877-cskmq" Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.609765 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-ovsdbserver-sb\") pod \"dnsmasq-dns-7476c47877-cskmq\" (UID: \"09d6a1d1-b814-4262-9437-f140536ce50d\") " pod="openstack/dnsmasq-dns-7476c47877-cskmq" Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.610316 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-ovsdbserver-sb\") pod \"dnsmasq-dns-7476c47877-cskmq\" (UID: \"09d6a1d1-b814-4262-9437-f140536ce50d\") " pod="openstack/dnsmasq-dns-7476c47877-cskmq" Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.610601 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-config\") pod \"dnsmasq-dns-7476c47877-cskmq\" (UID: \"09d6a1d1-b814-4262-9437-f140536ce50d\") " pod="openstack/dnsmasq-dns-7476c47877-cskmq" Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.611153 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-config\") pod \"dnsmasq-dns-7476c47877-cskmq\" (UID: \"09d6a1d1-b814-4262-9437-f140536ce50d\") " pod="openstack/dnsmasq-dns-7476c47877-cskmq" Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.611299 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-ovsdbserver-nb\") pod \"dnsmasq-dns-7476c47877-cskmq\" (UID: \"09d6a1d1-b814-4262-9437-f140536ce50d\") " pod="openstack/dnsmasq-dns-7476c47877-cskmq" Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.611846 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-ovsdbserver-nb\") pod \"dnsmasq-dns-7476c47877-cskmq\" (UID: \"09d6a1d1-b814-4262-9437-f140536ce50d\") " pod="openstack/dnsmasq-dns-7476c47877-cskmq" Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.611919 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-dns-svc\") pod \"dnsmasq-dns-7476c47877-cskmq\" (UID: \"09d6a1d1-b814-4262-9437-f140536ce50d\") " pod="openstack/dnsmasq-dns-7476c47877-cskmq" Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.612433 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-dns-svc\") pod \"dnsmasq-dns-7476c47877-cskmq\" (UID: \"09d6a1d1-b814-4262-9437-f140536ce50d\") " pod="openstack/dnsmasq-dns-7476c47877-cskmq" Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.644686 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckkg8\" (UniqueName: \"kubernetes.io/projected/09d6a1d1-b814-4262-9437-f140536ce50d-kube-api-access-ckkg8\") pod \"dnsmasq-dns-7476c47877-cskmq\" (UID: \"09d6a1d1-b814-4262-9437-f140536ce50d\") " pod="openstack/dnsmasq-dns-7476c47877-cskmq" Dec 01 07:00:01 crc kubenswrapper[4632]: I1201 07:00:01.707836 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7476c47877-cskmq" Dec 01 07:00:02 crc kubenswrapper[4632]: I1201 07:00:02.179499 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7476c47877-cskmq"] Dec 01 07:00:02 crc kubenswrapper[4632]: I1201 07:00:02.202195 4632 generic.go:334] "Generic (PLEG): container finished" podID="bd9d9843-a0ae-4d9b-ad41-440d257e647e" containerID="536e94f3e2a64480ec36eaaf825218dae46995b087f5946f9288e16d59e551ce" exitCode=0 Dec 01 07:00:02 crc kubenswrapper[4632]: I1201 07:00:02.202263 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-dkvp2" event={"ID":"bd9d9843-a0ae-4d9b-ad41-440d257e647e","Type":"ContainerDied","Data":"536e94f3e2a64480ec36eaaf825218dae46995b087f5946f9288e16d59e551ce"} Dec 01 07:00:02 crc kubenswrapper[4632]: I1201 07:00:02.209458 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7476c47877-cskmq" event={"ID":"09d6a1d1-b814-4262-9437-f140536ce50d","Type":"ContainerStarted","Data":"d9c2290b55fde864bc4d4bab7e4a73d923593679890c488b124428c53f766364"} Dec 01 07:00:02 crc kubenswrapper[4632]: I1201 07:00:02.474156 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 01 07:00:02 crc kubenswrapper[4632]: I1201 07:00:02.474519 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 01 07:00:02 crc kubenswrapper[4632]: I1201 07:00:02.481549 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 01 07:00:02 crc kubenswrapper[4632]: I1201 07:00:02.498110 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 01 07:00:03 crc kubenswrapper[4632]: I1201 07:00:03.221910 4632 generic.go:334] "Generic (PLEG): container finished" podID="09d6a1d1-b814-4262-9437-f140536ce50d" containerID="4d64bc01d2f7f55e9260d521e19816301f8cbe9876921532655775146f8760ca" exitCode=0 Dec 01 07:00:03 crc kubenswrapper[4632]: I1201 07:00:03.222041 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7476c47877-cskmq" event={"ID":"09d6a1d1-b814-4262-9437-f140536ce50d","Type":"ContainerDied","Data":"4d64bc01d2f7f55e9260d521e19816301f8cbe9876921532655775146f8760ca"} Dec 01 07:00:03 crc kubenswrapper[4632]: I1201 07:00:03.430909 4632 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 07:00:03 crc kubenswrapper[4632]: I1201 07:00:03.539443 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-dkvp2" Dec 01 07:00:03 crc kubenswrapper[4632]: I1201 07:00:03.659758 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59b49\" (UniqueName: \"kubernetes.io/projected/bd9d9843-a0ae-4d9b-ad41-440d257e647e-kube-api-access-59b49\") pod \"bd9d9843-a0ae-4d9b-ad41-440d257e647e\" (UID: \"bd9d9843-a0ae-4d9b-ad41-440d257e647e\") " Dec 01 07:00:03 crc kubenswrapper[4632]: I1201 07:00:03.660288 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bd9d9843-a0ae-4d9b-ad41-440d257e647e-secret-volume\") pod \"bd9d9843-a0ae-4d9b-ad41-440d257e647e\" (UID: \"bd9d9843-a0ae-4d9b-ad41-440d257e647e\") " Dec 01 07:00:03 crc kubenswrapper[4632]: I1201 07:00:03.660541 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bd9d9843-a0ae-4d9b-ad41-440d257e647e-config-volume\") pod \"bd9d9843-a0ae-4d9b-ad41-440d257e647e\" (UID: \"bd9d9843-a0ae-4d9b-ad41-440d257e647e\") " Dec 01 07:00:03 crc kubenswrapper[4632]: I1201 07:00:03.661254 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd9d9843-a0ae-4d9b-ad41-440d257e647e-config-volume" (OuterVolumeSpecName: "config-volume") pod "bd9d9843-a0ae-4d9b-ad41-440d257e647e" (UID: "bd9d9843-a0ae-4d9b-ad41-440d257e647e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:00:03 crc kubenswrapper[4632]: I1201 07:00:03.666123 4632 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bd9d9843-a0ae-4d9b-ad41-440d257e647e-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:03 crc kubenswrapper[4632]: I1201 07:00:03.680470 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd9d9843-a0ae-4d9b-ad41-440d257e647e-kube-api-access-59b49" (OuterVolumeSpecName: "kube-api-access-59b49") pod "bd9d9843-a0ae-4d9b-ad41-440d257e647e" (UID: "bd9d9843-a0ae-4d9b-ad41-440d257e647e"). InnerVolumeSpecName "kube-api-access-59b49". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:00:03 crc kubenswrapper[4632]: I1201 07:00:03.714617 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd9d9843-a0ae-4d9b-ad41-440d257e647e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "bd9d9843-a0ae-4d9b-ad41-440d257e647e" (UID: "bd9d9843-a0ae-4d9b-ad41-440d257e647e"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:00:03 crc kubenswrapper[4632]: I1201 07:00:03.767742 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59b49\" (UniqueName: \"kubernetes.io/projected/bd9d9843-a0ae-4d9b-ad41-440d257e647e-kube-api-access-59b49\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:03 crc kubenswrapper[4632]: I1201 07:00:03.767774 4632 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bd9d9843-a0ae-4d9b-ad41-440d257e647e-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:03 crc kubenswrapper[4632]: I1201 07:00:03.889827 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.073929 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/985b6caa-b51e-4da7-ba3b-1dad22e138ff-sg-core-conf-yaml\") pod \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\" (UID: \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.074683 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/985b6caa-b51e-4da7-ba3b-1dad22e138ff-combined-ca-bundle\") pod \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\" (UID: \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.074848 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pdmlx\" (UniqueName: \"kubernetes.io/projected/985b6caa-b51e-4da7-ba3b-1dad22e138ff-kube-api-access-pdmlx\") pod \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\" (UID: \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.074931 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/985b6caa-b51e-4da7-ba3b-1dad22e138ff-run-httpd\") pod \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\" (UID: \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.074992 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/985b6caa-b51e-4da7-ba3b-1dad22e138ff-scripts\") pod \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\" (UID: \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.075308 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/985b6caa-b51e-4da7-ba3b-1dad22e138ff-log-httpd\") pod \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\" (UID: \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.075349 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/985b6caa-b51e-4da7-ba3b-1dad22e138ff-config-data\") pod \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\" (UID: \"985b6caa-b51e-4da7-ba3b-1dad22e138ff\") " Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.075337 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/985b6caa-b51e-4da7-ba3b-1dad22e138ff-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "985b6caa-b51e-4da7-ba3b-1dad22e138ff" (UID: 
"985b6caa-b51e-4da7-ba3b-1dad22e138ff"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.075724 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/985b6caa-b51e-4da7-ba3b-1dad22e138ff-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "985b6caa-b51e-4da7-ba3b-1dad22e138ff" (UID: "985b6caa-b51e-4da7-ba3b-1dad22e138ff"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.076483 4632 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/985b6caa-b51e-4da7-ba3b-1dad22e138ff-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.076500 4632 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/985b6caa-b51e-4da7-ba3b-1dad22e138ff-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.079658 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/985b6caa-b51e-4da7-ba3b-1dad22e138ff-scripts" (OuterVolumeSpecName: "scripts") pod "985b6caa-b51e-4da7-ba3b-1dad22e138ff" (UID: "985b6caa-b51e-4da7-ba3b-1dad22e138ff"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.079934 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/985b6caa-b51e-4da7-ba3b-1dad22e138ff-kube-api-access-pdmlx" (OuterVolumeSpecName: "kube-api-access-pdmlx") pod "985b6caa-b51e-4da7-ba3b-1dad22e138ff" (UID: "985b6caa-b51e-4da7-ba3b-1dad22e138ff"). InnerVolumeSpecName "kube-api-access-pdmlx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.105252 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/985b6caa-b51e-4da7-ba3b-1dad22e138ff-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "985b6caa-b51e-4da7-ba3b-1dad22e138ff" (UID: "985b6caa-b51e-4da7-ba3b-1dad22e138ff"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.145173 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/985b6caa-b51e-4da7-ba3b-1dad22e138ff-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "985b6caa-b51e-4da7-ba3b-1dad22e138ff" (UID: "985b6caa-b51e-4da7-ba3b-1dad22e138ff"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.153657 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/985b6caa-b51e-4da7-ba3b-1dad22e138ff-config-data" (OuterVolumeSpecName: "config-data") pod "985b6caa-b51e-4da7-ba3b-1dad22e138ff" (UID: "985b6caa-b51e-4da7-ba3b-1dad22e138ff"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.179010 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/985b6caa-b51e-4da7-ba3b-1dad22e138ff-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.179126 4632 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/985b6caa-b51e-4da7-ba3b-1dad22e138ff-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.179190 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/985b6caa-b51e-4da7-ba3b-1dad22e138ff-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.179247 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pdmlx\" (UniqueName: \"kubernetes.io/projected/985b6caa-b51e-4da7-ba3b-1dad22e138ff-kube-api-access-pdmlx\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.179298 4632 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/985b6caa-b51e-4da7-ba3b-1dad22e138ff-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.234565 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-dkvp2" event={"ID":"bd9d9843-a0ae-4d9b-ad41-440d257e647e","Type":"ContainerDied","Data":"8a422b7e8480dab5dfc6fbfc5c0b1958cd9626b936808306a5846d1d34b302df"} Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.234613 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8a422b7e8480dab5dfc6fbfc5c0b1958cd9626b936808306a5846d1d34b302df" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.234690 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409540-dkvp2" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.246846 4632 generic.go:334] "Generic (PLEG): container finished" podID="985b6caa-b51e-4da7-ba3b-1dad22e138ff" containerID="d4e07e1ddd251612cef48cbac2fcced5ace9b1cb50e1fcc3a423bf7a92fa3e97" exitCode=0 Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.246899 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.246942 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"985b6caa-b51e-4da7-ba3b-1dad22e138ff","Type":"ContainerDied","Data":"d4e07e1ddd251612cef48cbac2fcced5ace9b1cb50e1fcc3a423bf7a92fa3e97"} Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.247005 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"985b6caa-b51e-4da7-ba3b-1dad22e138ff","Type":"ContainerDied","Data":"272253bb148d4f4b0b8359de5fd66387e1bcf9974bd04eef5fdbc30de7f81461"} Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.247026 4632 scope.go:117] "RemoveContainer" containerID="22e06cfd74d89f98114adb6a39a15b0289ce14df09888e523cfd2789cf872d9a" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.250878 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7476c47877-cskmq" event={"ID":"09d6a1d1-b814-4262-9437-f140536ce50d","Type":"ContainerStarted","Data":"bfd46054c67efe8a4f2837fd9ab612efc70a31f75726d2c418691015efef8b58"} Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.250923 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f7abdf36-162f-45df-81dc-c692d32cf159" containerName="nova-api-log" containerID="cri-o://05c6f3e711a3644ed9d081796f79e21cdab3ed1aa5afb6fde50f6c47a5e0b49c" gracePeriod=30 Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.251063 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f7abdf36-162f-45df-81dc-c692d32cf159" containerName="nova-api-api" containerID="cri-o://decdfdfa3c4de416d941c87afa869df560c413e79a7300c58c8d3a435d6f262c" gracePeriod=30 Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.271605 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7476c47877-cskmq" podStartSLOduration=3.2715864630000002 podStartE2EDuration="3.271586463s" podCreationTimestamp="2025-12-01 07:00:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:00:04.26863186 +0000 UTC m=+1013.833644843" watchObservedRunningTime="2025-12-01 07:00:04.271586463 +0000 UTC m=+1013.836599436" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.272092 4632 scope.go:117] "RemoveContainer" containerID="a1c159bfc199a4fd080b2da3c03826f4ff70817c1106311bbbd0a2b582711d33" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.289660 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.297850 4632 scope.go:117] "RemoveContainer" containerID="d4e07e1ddd251612cef48cbac2fcced5ace9b1cb50e1fcc3a423bf7a92fa3e97" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.304198 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.316730 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:00:04 crc kubenswrapper[4632]: E1201 07:00:04.317207 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="985b6caa-b51e-4da7-ba3b-1dad22e138ff" containerName="ceilometer-notification-agent" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.317226 4632 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="985b6caa-b51e-4da7-ba3b-1dad22e138ff" containerName="ceilometer-notification-agent" Dec 01 07:00:04 crc kubenswrapper[4632]: E1201 07:00:04.317265 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd9d9843-a0ae-4d9b-ad41-440d257e647e" containerName="collect-profiles" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.317271 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd9d9843-a0ae-4d9b-ad41-440d257e647e" containerName="collect-profiles" Dec 01 07:00:04 crc kubenswrapper[4632]: E1201 07:00:04.317282 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="985b6caa-b51e-4da7-ba3b-1dad22e138ff" containerName="ceilometer-central-agent" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.317287 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="985b6caa-b51e-4da7-ba3b-1dad22e138ff" containerName="ceilometer-central-agent" Dec 01 07:00:04 crc kubenswrapper[4632]: E1201 07:00:04.317297 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="985b6caa-b51e-4da7-ba3b-1dad22e138ff" containerName="sg-core" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.317302 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="985b6caa-b51e-4da7-ba3b-1dad22e138ff" containerName="sg-core" Dec 01 07:00:04 crc kubenswrapper[4632]: E1201 07:00:04.317309 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="985b6caa-b51e-4da7-ba3b-1dad22e138ff" containerName="proxy-httpd" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.317316 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="985b6caa-b51e-4da7-ba3b-1dad22e138ff" containerName="proxy-httpd" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.317589 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="985b6caa-b51e-4da7-ba3b-1dad22e138ff" containerName="sg-core" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.317606 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="985b6caa-b51e-4da7-ba3b-1dad22e138ff" containerName="ceilometer-notification-agent" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.317621 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="985b6caa-b51e-4da7-ba3b-1dad22e138ff" containerName="proxy-httpd" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.317634 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd9d9843-a0ae-4d9b-ad41-440d257e647e" containerName="collect-profiles" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.317646 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="985b6caa-b51e-4da7-ba3b-1dad22e138ff" containerName="ceilometer-central-agent" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.319259 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.321028 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.321266 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.321737 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.328542 4632 scope.go:117] "RemoveContainer" containerID="6e57477dccce9032ae046d1909ea368f4ba585d0da1baa37f57b779360a9d7b2" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.330800 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.355250 4632 scope.go:117] "RemoveContainer" containerID="22e06cfd74d89f98114adb6a39a15b0289ce14df09888e523cfd2789cf872d9a" Dec 01 07:00:04 crc kubenswrapper[4632]: E1201 07:00:04.355741 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22e06cfd74d89f98114adb6a39a15b0289ce14df09888e523cfd2789cf872d9a\": container with ID starting with 22e06cfd74d89f98114adb6a39a15b0289ce14df09888e523cfd2789cf872d9a not found: ID does not exist" containerID="22e06cfd74d89f98114adb6a39a15b0289ce14df09888e523cfd2789cf872d9a" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.355776 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22e06cfd74d89f98114adb6a39a15b0289ce14df09888e523cfd2789cf872d9a"} err="failed to get container status \"22e06cfd74d89f98114adb6a39a15b0289ce14df09888e523cfd2789cf872d9a\": rpc error: code = NotFound desc = could not find container \"22e06cfd74d89f98114adb6a39a15b0289ce14df09888e523cfd2789cf872d9a\": container with ID starting with 22e06cfd74d89f98114adb6a39a15b0289ce14df09888e523cfd2789cf872d9a not found: ID does not exist" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.355802 4632 scope.go:117] "RemoveContainer" containerID="a1c159bfc199a4fd080b2da3c03826f4ff70817c1106311bbbd0a2b582711d33" Dec 01 07:00:04 crc kubenswrapper[4632]: E1201 07:00:04.356089 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1c159bfc199a4fd080b2da3c03826f4ff70817c1106311bbbd0a2b582711d33\": container with ID starting with a1c159bfc199a4fd080b2da3c03826f4ff70817c1106311bbbd0a2b582711d33 not found: ID does not exist" containerID="a1c159bfc199a4fd080b2da3c03826f4ff70817c1106311bbbd0a2b582711d33" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.356192 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1c159bfc199a4fd080b2da3c03826f4ff70817c1106311bbbd0a2b582711d33"} err="failed to get container status \"a1c159bfc199a4fd080b2da3c03826f4ff70817c1106311bbbd0a2b582711d33\": rpc error: code = NotFound desc = could not find container \"a1c159bfc199a4fd080b2da3c03826f4ff70817c1106311bbbd0a2b582711d33\": container with ID starting with a1c159bfc199a4fd080b2da3c03826f4ff70817c1106311bbbd0a2b582711d33 not found: ID does not exist" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.356282 4632 scope.go:117] "RemoveContainer" containerID="d4e07e1ddd251612cef48cbac2fcced5ace9b1cb50e1fcc3a423bf7a92fa3e97" Dec 01 07:00:04 
crc kubenswrapper[4632]: E1201 07:00:04.356664 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4e07e1ddd251612cef48cbac2fcced5ace9b1cb50e1fcc3a423bf7a92fa3e97\": container with ID starting with d4e07e1ddd251612cef48cbac2fcced5ace9b1cb50e1fcc3a423bf7a92fa3e97 not found: ID does not exist" containerID="d4e07e1ddd251612cef48cbac2fcced5ace9b1cb50e1fcc3a423bf7a92fa3e97" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.356696 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4e07e1ddd251612cef48cbac2fcced5ace9b1cb50e1fcc3a423bf7a92fa3e97"} err="failed to get container status \"d4e07e1ddd251612cef48cbac2fcced5ace9b1cb50e1fcc3a423bf7a92fa3e97\": rpc error: code = NotFound desc = could not find container \"d4e07e1ddd251612cef48cbac2fcced5ace9b1cb50e1fcc3a423bf7a92fa3e97\": container with ID starting with d4e07e1ddd251612cef48cbac2fcced5ace9b1cb50e1fcc3a423bf7a92fa3e97 not found: ID does not exist" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.356714 4632 scope.go:117] "RemoveContainer" containerID="6e57477dccce9032ae046d1909ea368f4ba585d0da1baa37f57b779360a9d7b2" Dec 01 07:00:04 crc kubenswrapper[4632]: E1201 07:00:04.356973 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e57477dccce9032ae046d1909ea368f4ba585d0da1baa37f57b779360a9d7b2\": container with ID starting with 6e57477dccce9032ae046d1909ea368f4ba585d0da1baa37f57b779360a9d7b2 not found: ID does not exist" containerID="6e57477dccce9032ae046d1909ea368f4ba585d0da1baa37f57b779360a9d7b2" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.357022 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e57477dccce9032ae046d1909ea368f4ba585d0da1baa37f57b779360a9d7b2"} err="failed to get container status \"6e57477dccce9032ae046d1909ea368f4ba585d0da1baa37f57b779360a9d7b2\": rpc error: code = NotFound desc = could not find container \"6e57477dccce9032ae046d1909ea368f4ba585d0da1baa37f57b779360a9d7b2\": container with ID starting with 6e57477dccce9032ae046d1909ea368f4ba585d0da1baa37f57b779360a9d7b2 not found: ID does not exist" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.488689 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e846df70-ffe8-4756-9b4b-e27612b7cd73-run-httpd\") pod \"ceilometer-0\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " pod="openstack/ceilometer-0" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.488968 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " pod="openstack/ceilometer-0" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.489221 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9qsg\" (UniqueName: \"kubernetes.io/projected/e846df70-ffe8-4756-9b4b-e27612b7cd73-kube-api-access-x9qsg\") pod \"ceilometer-0\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " pod="openstack/ceilometer-0" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.489372 4632 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-scripts\") pod \"ceilometer-0\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " pod="openstack/ceilometer-0" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.489543 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-config-data\") pod \"ceilometer-0\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " pod="openstack/ceilometer-0" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.489764 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e846df70-ffe8-4756-9b4b-e27612b7cd73-log-httpd\") pod \"ceilometer-0\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " pod="openstack/ceilometer-0" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.489827 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " pod="openstack/ceilometer-0" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.489889 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " pod="openstack/ceilometer-0" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.592038 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9qsg\" (UniqueName: \"kubernetes.io/projected/e846df70-ffe8-4756-9b4b-e27612b7cd73-kube-api-access-x9qsg\") pod \"ceilometer-0\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " pod="openstack/ceilometer-0" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.592099 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-scripts\") pod \"ceilometer-0\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " pod="openstack/ceilometer-0" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.592143 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-config-data\") pod \"ceilometer-0\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " pod="openstack/ceilometer-0" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.592180 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e846df70-ffe8-4756-9b4b-e27612b7cd73-log-httpd\") pod \"ceilometer-0\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " pod="openstack/ceilometer-0" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.592199 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " pod="openstack/ceilometer-0" Dec 01 07:00:04 crc 
kubenswrapper[4632]: I1201 07:00:04.592220 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " pod="openstack/ceilometer-0" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.592243 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e846df70-ffe8-4756-9b4b-e27612b7cd73-run-httpd\") pod \"ceilometer-0\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " pod="openstack/ceilometer-0" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.592269 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " pod="openstack/ceilometer-0" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.592723 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e846df70-ffe8-4756-9b4b-e27612b7cd73-log-httpd\") pod \"ceilometer-0\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " pod="openstack/ceilometer-0" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.592939 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e846df70-ffe8-4756-9b4b-e27612b7cd73-run-httpd\") pod \"ceilometer-0\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " pod="openstack/ceilometer-0" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.598546 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " pod="openstack/ceilometer-0" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.598635 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " pod="openstack/ceilometer-0" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.598682 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-scripts\") pod \"ceilometer-0\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " pod="openstack/ceilometer-0" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.598705 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " pod="openstack/ceilometer-0" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.600653 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-config-data\") pod \"ceilometer-0\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " pod="openstack/ceilometer-0" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.610804 4632 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9qsg\" (UniqueName: \"kubernetes.io/projected/e846df70-ffe8-4756-9b4b-e27612b7cd73-kube-api-access-x9qsg\") pod \"ceilometer-0\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " pod="openstack/ceilometer-0" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.637669 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.763387 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="985b6caa-b51e-4da7-ba3b-1dad22e138ff" path="/var/lib/kubelet/pods/985b6caa-b51e-4da7-ba3b-1dad22e138ff/volumes" Dec 01 07:00:04 crc kubenswrapper[4632]: I1201 07:00:04.967815 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.077981 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.154712 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.265573 4632 generic.go:334] "Generic (PLEG): container finished" podID="f7abdf36-162f-45df-81dc-c692d32cf159" containerID="05c6f3e711a3644ed9d081796f79e21cdab3ed1aa5afb6fde50f6c47a5e0b49c" exitCode=143 Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.265671 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f7abdf36-162f-45df-81dc-c692d32cf159","Type":"ContainerDied","Data":"05c6f3e711a3644ed9d081796f79e21cdab3ed1aa5afb6fde50f6c47a5e0b49c"} Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.267551 4632 generic.go:334] "Generic (PLEG): container finished" podID="e0343305-d3cf-4375-b5c8-1cd5c5f6054c" containerID="c74bee9c23ee3b3e12720bd2f58c664b9e05d2dff972faaf06ecf55ac636cec9" exitCode=137 Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.267654 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.267649 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e0343305-d3cf-4375-b5c8-1cd5c5f6054c","Type":"ContainerDied","Data":"c74bee9c23ee3b3e12720bd2f58c664b9e05d2dff972faaf06ecf55ac636cec9"} Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.267734 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"e0343305-d3cf-4375-b5c8-1cd5c5f6054c","Type":"ContainerDied","Data":"b71b42c57b17b65fc7b01fb0b63fe768c5002f24c010e44956585c0117acd46e"} Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.267812 4632 scope.go:117] "RemoveContainer" containerID="c74bee9c23ee3b3e12720bd2f58c664b9e05d2dff972faaf06ecf55ac636cec9" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.270333 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e846df70-ffe8-4756-9b4b-e27612b7cd73","Type":"ContainerStarted","Data":"46be742db54c1471b08fece272d8e2a8a186daacc49fb017be6e21bb7edbf7d7"} Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.270788 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7476c47877-cskmq" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.302153 4632 scope.go:117] "RemoveContainer" containerID="c74bee9c23ee3b3e12720bd2f58c664b9e05d2dff972faaf06ecf55ac636cec9" Dec 01 07:00:05 crc kubenswrapper[4632]: E1201 07:00:05.302560 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c74bee9c23ee3b3e12720bd2f58c664b9e05d2dff972faaf06ecf55ac636cec9\": container with ID starting with c74bee9c23ee3b3e12720bd2f58c664b9e05d2dff972faaf06ecf55ac636cec9 not found: ID does not exist" containerID="c74bee9c23ee3b3e12720bd2f58c664b9e05d2dff972faaf06ecf55ac636cec9" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.302591 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c74bee9c23ee3b3e12720bd2f58c664b9e05d2dff972faaf06ecf55ac636cec9"} err="failed to get container status \"c74bee9c23ee3b3e12720bd2f58c664b9e05d2dff972faaf06ecf55ac636cec9\": rpc error: code = NotFound desc = could not find container \"c74bee9c23ee3b3e12720bd2f58c664b9e05d2dff972faaf06ecf55ac636cec9\": container with ID starting with c74bee9c23ee3b3e12720bd2f58c664b9e05d2dff972faaf06ecf55ac636cec9 not found: ID does not exist" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.311887 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0343305-d3cf-4375-b5c8-1cd5c5f6054c-combined-ca-bundle\") pod \"e0343305-d3cf-4375-b5c8-1cd5c5f6054c\" (UID: \"e0343305-d3cf-4375-b5c8-1cd5c5f6054c\") " Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.311938 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hs78d\" (UniqueName: \"kubernetes.io/projected/e0343305-d3cf-4375-b5c8-1cd5c5f6054c-kube-api-access-hs78d\") pod \"e0343305-d3cf-4375-b5c8-1cd5c5f6054c\" (UID: \"e0343305-d3cf-4375-b5c8-1cd5c5f6054c\") " Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.312001 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0343305-d3cf-4375-b5c8-1cd5c5f6054c-config-data\") pod 
\"e0343305-d3cf-4375-b5c8-1cd5c5f6054c\" (UID: \"e0343305-d3cf-4375-b5c8-1cd5c5f6054c\") " Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.318427 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0343305-d3cf-4375-b5c8-1cd5c5f6054c-kube-api-access-hs78d" (OuterVolumeSpecName: "kube-api-access-hs78d") pod "e0343305-d3cf-4375-b5c8-1cd5c5f6054c" (UID: "e0343305-d3cf-4375-b5c8-1cd5c5f6054c"). InnerVolumeSpecName "kube-api-access-hs78d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.336382 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0343305-d3cf-4375-b5c8-1cd5c5f6054c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e0343305-d3cf-4375-b5c8-1cd5c5f6054c" (UID: "e0343305-d3cf-4375-b5c8-1cd5c5f6054c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.337102 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0343305-d3cf-4375-b5c8-1cd5c5f6054c-config-data" (OuterVolumeSpecName: "config-data") pod "e0343305-d3cf-4375-b5c8-1cd5c5f6054c" (UID: "e0343305-d3cf-4375-b5c8-1cd5c5f6054c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.416213 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0343305-d3cf-4375-b5c8-1cd5c5f6054c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.416270 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hs78d\" (UniqueName: \"kubernetes.io/projected/e0343305-d3cf-4375-b5c8-1cd5c5f6054c-kube-api-access-hs78d\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.416287 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0343305-d3cf-4375-b5c8-1cd5c5f6054c-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.600772 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.610067 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.618542 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 07:00:05 crc kubenswrapper[4632]: E1201 07:00:05.619041 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0343305-d3cf-4375-b5c8-1cd5c5f6054c" containerName="nova-cell1-novncproxy-novncproxy" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.619062 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0343305-d3cf-4375-b5c8-1cd5c5f6054c" containerName="nova-cell1-novncproxy-novncproxy" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.619296 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0343305-d3cf-4375-b5c8-1cd5c5f6054c" containerName="nova-cell1-novncproxy-novncproxy" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.620087 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.622551 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.622607 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.622782 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.624313 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.722644 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.722965 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6lz5\" (UniqueName: \"kubernetes.io/projected/c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc-kube-api-access-s6lz5\") pod \"nova-cell1-novncproxy-0\" (UID: \"c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.723195 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.723277 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.723557 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.825576 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.825704 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6lz5\" (UniqueName: \"kubernetes.io/projected/c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc-kube-api-access-s6lz5\") pod \"nova-cell1-novncproxy-0\" (UID: \"c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc\") " 
pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.825752 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.825776 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.825829 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.831198 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.831365 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.831884 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.834019 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.844982 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6lz5\" (UniqueName: \"kubernetes.io/projected/c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc-kube-api-access-s6lz5\") pod \"nova-cell1-novncproxy-0\" (UID: \"c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc\") " pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:00:05 crc kubenswrapper[4632]: I1201 07:00:05.980035 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:00:06 crc kubenswrapper[4632]: I1201 07:00:06.285976 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e846df70-ffe8-4756-9b4b-e27612b7cd73","Type":"ContainerStarted","Data":"7a5413eb7bb999a55b7021f7ccf0836de17da57b8841965e9e344a3d817f68c7"} Dec 01 07:00:06 crc kubenswrapper[4632]: W1201 07:00:06.428855 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc9c1ad02_ff5b_4f40_8e92_00ceb5acc4fc.slice/crio-9d2a073bfd67c2e2d0399a7684bf5975bac881d582e9ef1ef249781b6476ab49 WatchSource:0}: Error finding container 9d2a073bfd67c2e2d0399a7684bf5975bac881d582e9ef1ef249781b6476ab49: Status 404 returned error can't find the container with id 9d2a073bfd67c2e2d0399a7684bf5975bac881d582e9ef1ef249781b6476ab49 Dec 01 07:00:06 crc kubenswrapper[4632]: I1201 07:00:06.429226 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 01 07:00:06 crc kubenswrapper[4632]: I1201 07:00:06.763575 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0343305-d3cf-4375-b5c8-1cd5c5f6054c" path="/var/lib/kubelet/pods/e0343305-d3cf-4375-b5c8-1cd5c5f6054c/volumes" Dec 01 07:00:07 crc kubenswrapper[4632]: I1201 07:00:07.297882 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc","Type":"ContainerStarted","Data":"ac1407e9f96314a7a34c9c311a8a3d78b61fe20447b0dc03022d2aa3e0a9f1c6"} Dec 01 07:00:07 crc kubenswrapper[4632]: I1201 07:00:07.297958 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc","Type":"ContainerStarted","Data":"9d2a073bfd67c2e2d0399a7684bf5975bac881d582e9ef1ef249781b6476ab49"} Dec 01 07:00:07 crc kubenswrapper[4632]: I1201 07:00:07.300146 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e846df70-ffe8-4756-9b4b-e27612b7cd73","Type":"ContainerStarted","Data":"9d9bf87aeccc49e78216f375d03d3f366c115800d64035ad7d4b388613609ef0"} Dec 01 07:00:07 crc kubenswrapper[4632]: I1201 07:00:07.336306 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.336285307 podStartE2EDuration="2.336285307s" podCreationTimestamp="2025-12-01 07:00:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:00:07.315143526 +0000 UTC m=+1016.880156499" watchObservedRunningTime="2025-12-01 07:00:07.336285307 +0000 UTC m=+1016.901298281" Dec 01 07:00:07 crc kubenswrapper[4632]: I1201 07:00:07.841640 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 01 07:00:07 crc kubenswrapper[4632]: I1201 07:00:07.886473 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rf25\" (UniqueName: \"kubernetes.io/projected/f7abdf36-162f-45df-81dc-c692d32cf159-kube-api-access-9rf25\") pod \"f7abdf36-162f-45df-81dc-c692d32cf159\" (UID: \"f7abdf36-162f-45df-81dc-c692d32cf159\") " Dec 01 07:00:07 crc kubenswrapper[4632]: I1201 07:00:07.886609 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7abdf36-162f-45df-81dc-c692d32cf159-config-data\") pod \"f7abdf36-162f-45df-81dc-c692d32cf159\" (UID: \"f7abdf36-162f-45df-81dc-c692d32cf159\") " Dec 01 07:00:07 crc kubenswrapper[4632]: I1201 07:00:07.886666 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7abdf36-162f-45df-81dc-c692d32cf159-logs\") pod \"f7abdf36-162f-45df-81dc-c692d32cf159\" (UID: \"f7abdf36-162f-45df-81dc-c692d32cf159\") " Dec 01 07:00:07 crc kubenswrapper[4632]: I1201 07:00:07.886694 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7abdf36-162f-45df-81dc-c692d32cf159-combined-ca-bundle\") pod \"f7abdf36-162f-45df-81dc-c692d32cf159\" (UID: \"f7abdf36-162f-45df-81dc-c692d32cf159\") " Dec 01 07:00:07 crc kubenswrapper[4632]: I1201 07:00:07.887234 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7abdf36-162f-45df-81dc-c692d32cf159-logs" (OuterVolumeSpecName: "logs") pod "f7abdf36-162f-45df-81dc-c692d32cf159" (UID: "f7abdf36-162f-45df-81dc-c692d32cf159"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:00:07 crc kubenswrapper[4632]: I1201 07:00:07.911937 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7abdf36-162f-45df-81dc-c692d32cf159-kube-api-access-9rf25" (OuterVolumeSpecName: "kube-api-access-9rf25") pod "f7abdf36-162f-45df-81dc-c692d32cf159" (UID: "f7abdf36-162f-45df-81dc-c692d32cf159"). InnerVolumeSpecName "kube-api-access-9rf25". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:00:07 crc kubenswrapper[4632]: I1201 07:00:07.929516 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7abdf36-162f-45df-81dc-c692d32cf159-config-data" (OuterVolumeSpecName: "config-data") pod "f7abdf36-162f-45df-81dc-c692d32cf159" (UID: "f7abdf36-162f-45df-81dc-c692d32cf159"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:00:07 crc kubenswrapper[4632]: I1201 07:00:07.959325 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7abdf36-162f-45df-81dc-c692d32cf159-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f7abdf36-162f-45df-81dc-c692d32cf159" (UID: "f7abdf36-162f-45df-81dc-c692d32cf159"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:00:07 crc kubenswrapper[4632]: I1201 07:00:07.989225 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9rf25\" (UniqueName: \"kubernetes.io/projected/f7abdf36-162f-45df-81dc-c692d32cf159-kube-api-access-9rf25\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:07 crc kubenswrapper[4632]: I1201 07:00:07.989262 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7abdf36-162f-45df-81dc-c692d32cf159-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:07 crc kubenswrapper[4632]: I1201 07:00:07.989276 4632 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7abdf36-162f-45df-81dc-c692d32cf159-logs\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:07 crc kubenswrapper[4632]: I1201 07:00:07.989286 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7abdf36-162f-45df-81dc-c692d32cf159-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.317107 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e846df70-ffe8-4756-9b4b-e27612b7cd73","Type":"ContainerStarted","Data":"9c6efbd9564435d3884d590bf2199d3f1d5f834c257bde7ae9b3a5f069a84f01"} Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.319868 4632 generic.go:334] "Generic (PLEG): container finished" podID="f7abdf36-162f-45df-81dc-c692d32cf159" containerID="decdfdfa3c4de416d941c87afa869df560c413e79a7300c58c8d3a435d6f262c" exitCode=0 Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.319954 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.319937 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f7abdf36-162f-45df-81dc-c692d32cf159","Type":"ContainerDied","Data":"decdfdfa3c4de416d941c87afa869df560c413e79a7300c58c8d3a435d6f262c"} Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.320017 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f7abdf36-162f-45df-81dc-c692d32cf159","Type":"ContainerDied","Data":"93b104b7c2b7d8f02325c355387596c43d1ca001ec652d15637d0acea1ba301e"} Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.320048 4632 scope.go:117] "RemoveContainer" containerID="decdfdfa3c4de416d941c87afa869df560c413e79a7300c58c8d3a435d6f262c" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.347023 4632 scope.go:117] "RemoveContainer" containerID="05c6f3e711a3644ed9d081796f79e21cdab3ed1aa5afb6fde50f6c47a5e0b49c" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.358396 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.372429 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.382469 4632 scope.go:117] "RemoveContainer" containerID="decdfdfa3c4de416d941c87afa869df560c413e79a7300c58c8d3a435d6f262c" Dec 01 07:00:08 crc kubenswrapper[4632]: E1201 07:00:08.382873 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"decdfdfa3c4de416d941c87afa869df560c413e79a7300c58c8d3a435d6f262c\": container with ID starting with decdfdfa3c4de416d941c87afa869df560c413e79a7300c58c8d3a435d6f262c not found: ID does not exist" containerID="decdfdfa3c4de416d941c87afa869df560c413e79a7300c58c8d3a435d6f262c" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.382909 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"decdfdfa3c4de416d941c87afa869df560c413e79a7300c58c8d3a435d6f262c"} err="failed to get container status \"decdfdfa3c4de416d941c87afa869df560c413e79a7300c58c8d3a435d6f262c\": rpc error: code = NotFound desc = could not find container \"decdfdfa3c4de416d941c87afa869df560c413e79a7300c58c8d3a435d6f262c\": container with ID starting with decdfdfa3c4de416d941c87afa869df560c413e79a7300c58c8d3a435d6f262c not found: ID does not exist" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.382934 4632 scope.go:117] "RemoveContainer" containerID="05c6f3e711a3644ed9d081796f79e21cdab3ed1aa5afb6fde50f6c47a5e0b49c" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.383139 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 01 07:00:08 crc kubenswrapper[4632]: E1201 07:00:08.383298 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05c6f3e711a3644ed9d081796f79e21cdab3ed1aa5afb6fde50f6c47a5e0b49c\": container with ID starting with 05c6f3e711a3644ed9d081796f79e21cdab3ed1aa5afb6fde50f6c47a5e0b49c not found: ID does not exist" containerID="05c6f3e711a3644ed9d081796f79e21cdab3ed1aa5afb6fde50f6c47a5e0b49c" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.383334 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05c6f3e711a3644ed9d081796f79e21cdab3ed1aa5afb6fde50f6c47a5e0b49c"} 
err="failed to get container status \"05c6f3e711a3644ed9d081796f79e21cdab3ed1aa5afb6fde50f6c47a5e0b49c\": rpc error: code = NotFound desc = could not find container \"05c6f3e711a3644ed9d081796f79e21cdab3ed1aa5afb6fde50f6c47a5e0b49c\": container with ID starting with 05c6f3e711a3644ed9d081796f79e21cdab3ed1aa5afb6fde50f6c47a5e0b49c not found: ID does not exist" Dec 01 07:00:08 crc kubenswrapper[4632]: E1201 07:00:08.383550 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7abdf36-162f-45df-81dc-c692d32cf159" containerName="nova-api-api" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.383569 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7abdf36-162f-45df-81dc-c692d32cf159" containerName="nova-api-api" Dec 01 07:00:08 crc kubenswrapper[4632]: E1201 07:00:08.383590 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7abdf36-162f-45df-81dc-c692d32cf159" containerName="nova-api-log" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.383598 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7abdf36-162f-45df-81dc-c692d32cf159" containerName="nova-api-log" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.383789 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7abdf36-162f-45df-81dc-c692d32cf159" containerName="nova-api-api" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.383821 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7abdf36-162f-45df-81dc-c692d32cf159" containerName="nova-api-log" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.384802 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.388593 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.388860 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.392168 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.395856 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56716ad7-406c-49f8-bbc8-4ba990861e48-logs\") pod \"nova-api-0\" (UID: \"56716ad7-406c-49f8-bbc8-4ba990861e48\") " pod="openstack/nova-api-0" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.395930 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56716ad7-406c-49f8-bbc8-4ba990861e48-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"56716ad7-406c-49f8-bbc8-4ba990861e48\") " pod="openstack/nova-api-0" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.395983 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/56716ad7-406c-49f8-bbc8-4ba990861e48-internal-tls-certs\") pod \"nova-api-0\" (UID: \"56716ad7-406c-49f8-bbc8-4ba990861e48\") " pod="openstack/nova-api-0" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.396007 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/56716ad7-406c-49f8-bbc8-4ba990861e48-config-data\") pod \"nova-api-0\" (UID: \"56716ad7-406c-49f8-bbc8-4ba990861e48\") " pod="openstack/nova-api-0" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.396082 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2kpv\" (UniqueName: \"kubernetes.io/projected/56716ad7-406c-49f8-bbc8-4ba990861e48-kube-api-access-c2kpv\") pod \"nova-api-0\" (UID: \"56716ad7-406c-49f8-bbc8-4ba990861e48\") " pod="openstack/nova-api-0" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.396111 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56716ad7-406c-49f8-bbc8-4ba990861e48-public-tls-certs\") pod \"nova-api-0\" (UID: \"56716ad7-406c-49f8-bbc8-4ba990861e48\") " pod="openstack/nova-api-0" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.403876 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.498175 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/56716ad7-406c-49f8-bbc8-4ba990861e48-internal-tls-certs\") pod \"nova-api-0\" (UID: \"56716ad7-406c-49f8-bbc8-4ba990861e48\") " pod="openstack/nova-api-0" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.498230 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56716ad7-406c-49f8-bbc8-4ba990861e48-config-data\") pod \"nova-api-0\" (UID: \"56716ad7-406c-49f8-bbc8-4ba990861e48\") " pod="openstack/nova-api-0" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.498327 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2kpv\" (UniqueName: \"kubernetes.io/projected/56716ad7-406c-49f8-bbc8-4ba990861e48-kube-api-access-c2kpv\") pod \"nova-api-0\" (UID: \"56716ad7-406c-49f8-bbc8-4ba990861e48\") " pod="openstack/nova-api-0" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.498585 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56716ad7-406c-49f8-bbc8-4ba990861e48-public-tls-certs\") pod \"nova-api-0\" (UID: \"56716ad7-406c-49f8-bbc8-4ba990861e48\") " pod="openstack/nova-api-0" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.498630 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56716ad7-406c-49f8-bbc8-4ba990861e48-logs\") pod \"nova-api-0\" (UID: \"56716ad7-406c-49f8-bbc8-4ba990861e48\") " pod="openstack/nova-api-0" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.498697 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56716ad7-406c-49f8-bbc8-4ba990861e48-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"56716ad7-406c-49f8-bbc8-4ba990861e48\") " pod="openstack/nova-api-0" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.499174 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56716ad7-406c-49f8-bbc8-4ba990861e48-logs\") pod \"nova-api-0\" (UID: \"56716ad7-406c-49f8-bbc8-4ba990861e48\") " pod="openstack/nova-api-0" Dec 01 07:00:08 crc 
kubenswrapper[4632]: I1201 07:00:08.504127 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56716ad7-406c-49f8-bbc8-4ba990861e48-config-data\") pod \"nova-api-0\" (UID: \"56716ad7-406c-49f8-bbc8-4ba990861e48\") " pod="openstack/nova-api-0" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.504675 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56716ad7-406c-49f8-bbc8-4ba990861e48-public-tls-certs\") pod \"nova-api-0\" (UID: \"56716ad7-406c-49f8-bbc8-4ba990861e48\") " pod="openstack/nova-api-0" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.504810 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/56716ad7-406c-49f8-bbc8-4ba990861e48-internal-tls-certs\") pod \"nova-api-0\" (UID: \"56716ad7-406c-49f8-bbc8-4ba990861e48\") " pod="openstack/nova-api-0" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.505605 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56716ad7-406c-49f8-bbc8-4ba990861e48-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"56716ad7-406c-49f8-bbc8-4ba990861e48\") " pod="openstack/nova-api-0" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.514603 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2kpv\" (UniqueName: \"kubernetes.io/projected/56716ad7-406c-49f8-bbc8-4ba990861e48-kube-api-access-c2kpv\") pod \"nova-api-0\" (UID: \"56716ad7-406c-49f8-bbc8-4ba990861e48\") " pod="openstack/nova-api-0" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.708586 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 01 07:00:08 crc kubenswrapper[4632]: I1201 07:00:08.759801 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7abdf36-162f-45df-81dc-c692d32cf159" path="/var/lib/kubelet/pods/f7abdf36-162f-45df-81dc-c692d32cf159/volumes" Dec 01 07:00:09 crc kubenswrapper[4632]: W1201 07:00:09.134251 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod56716ad7_406c_49f8_bbc8_4ba990861e48.slice/crio-9056baa03a5408fdbf5e8d17aeb0896f7200d6fc03716b79132809eebbbeff5f WatchSource:0}: Error finding container 9056baa03a5408fdbf5e8d17aeb0896f7200d6fc03716b79132809eebbbeff5f: Status 404 returned error can't find the container with id 9056baa03a5408fdbf5e8d17aeb0896f7200d6fc03716b79132809eebbbeff5f Dec 01 07:00:09 crc kubenswrapper[4632]: I1201 07:00:09.137018 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 01 07:00:09 crc kubenswrapper[4632]: I1201 07:00:09.331476 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"56716ad7-406c-49f8-bbc8-4ba990861e48","Type":"ContainerStarted","Data":"0658b9380c8dea41fac6f5c33575cbf30ed5fd2ef81dea8bf6d3489a0c70df49"} Dec 01 07:00:09 crc kubenswrapper[4632]: I1201 07:00:09.331755 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"56716ad7-406c-49f8-bbc8-4ba990861e48","Type":"ContainerStarted","Data":"9056baa03a5408fdbf5e8d17aeb0896f7200d6fc03716b79132809eebbbeff5f"} Dec 01 07:00:09 crc kubenswrapper[4632]: I1201 07:00:09.335207 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e846df70-ffe8-4756-9b4b-e27612b7cd73","Type":"ContainerStarted","Data":"02e87ce45e2c993f39de9a6871e34ea2013bb8676d68910776b5c3cbb1695f7b"} Dec 01 07:00:09 crc kubenswrapper[4632]: I1201 07:00:09.335333 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e846df70-ffe8-4756-9b4b-e27612b7cd73" containerName="ceilometer-central-agent" containerID="cri-o://7a5413eb7bb999a55b7021f7ccf0836de17da57b8841965e9e344a3d817f68c7" gracePeriod=30 Dec 01 07:00:09 crc kubenswrapper[4632]: I1201 07:00:09.335397 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e846df70-ffe8-4756-9b4b-e27612b7cd73" containerName="sg-core" containerID="cri-o://9c6efbd9564435d3884d590bf2199d3f1d5f834c257bde7ae9b3a5f069a84f01" gracePeriod=30 Dec 01 07:00:09 crc kubenswrapper[4632]: I1201 07:00:09.335418 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e846df70-ffe8-4756-9b4b-e27612b7cd73" containerName="ceilometer-notification-agent" containerID="cri-o://9d9bf87aeccc49e78216f375d03d3f366c115800d64035ad7d4b388613609ef0" gracePeriod=30 Dec 01 07:00:09 crc kubenswrapper[4632]: I1201 07:00:09.335470 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e846df70-ffe8-4756-9b4b-e27612b7cd73" containerName="proxy-httpd" containerID="cri-o://02e87ce45e2c993f39de9a6871e34ea2013bb8676d68910776b5c3cbb1695f7b" gracePeriod=30 Dec 01 07:00:09 crc kubenswrapper[4632]: I1201 07:00:09.335572 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 07:00:09 crc kubenswrapper[4632]: I1201 07:00:09.363183 4632 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.457890816 podStartE2EDuration="5.363157961s" podCreationTimestamp="2025-12-01 07:00:04 +0000 UTC" firstStartedPulling="2025-12-01 07:00:05.073815683 +0000 UTC m=+1014.638828646" lastFinishedPulling="2025-12-01 07:00:08.979082818 +0000 UTC m=+1018.544095791" observedRunningTime="2025-12-01 07:00:09.356012041 +0000 UTC m=+1018.921025014" watchObservedRunningTime="2025-12-01 07:00:09.363157961 +0000 UTC m=+1018.928170934" Dec 01 07:00:09 crc kubenswrapper[4632]: I1201 07:00:09.556957 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 01 07:00:10 crc kubenswrapper[4632]: I1201 07:00:10.359446 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"56716ad7-406c-49f8-bbc8-4ba990861e48","Type":"ContainerStarted","Data":"25d46ddcca3dddaad82d49d7150321cf1863ebbaa8010769e572b761c55600d7"} Dec 01 07:00:10 crc kubenswrapper[4632]: I1201 07:00:10.362492 4632 generic.go:334] "Generic (PLEG): container finished" podID="e846df70-ffe8-4756-9b4b-e27612b7cd73" containerID="02e87ce45e2c993f39de9a6871e34ea2013bb8676d68910776b5c3cbb1695f7b" exitCode=0 Dec 01 07:00:10 crc kubenswrapper[4632]: I1201 07:00:10.362523 4632 generic.go:334] "Generic (PLEG): container finished" podID="e846df70-ffe8-4756-9b4b-e27612b7cd73" containerID="9c6efbd9564435d3884d590bf2199d3f1d5f834c257bde7ae9b3a5f069a84f01" exitCode=2 Dec 01 07:00:10 crc kubenswrapper[4632]: I1201 07:00:10.362532 4632 generic.go:334] "Generic (PLEG): container finished" podID="e846df70-ffe8-4756-9b4b-e27612b7cd73" containerID="9d9bf87aeccc49e78216f375d03d3f366c115800d64035ad7d4b388613609ef0" exitCode=0 Dec 01 07:00:10 crc kubenswrapper[4632]: I1201 07:00:10.362529 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e846df70-ffe8-4756-9b4b-e27612b7cd73","Type":"ContainerDied","Data":"02e87ce45e2c993f39de9a6871e34ea2013bb8676d68910776b5c3cbb1695f7b"} Dec 01 07:00:10 crc kubenswrapper[4632]: I1201 07:00:10.362567 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e846df70-ffe8-4756-9b4b-e27612b7cd73","Type":"ContainerDied","Data":"9c6efbd9564435d3884d590bf2199d3f1d5f834c257bde7ae9b3a5f069a84f01"} Dec 01 07:00:10 crc kubenswrapper[4632]: I1201 07:00:10.362579 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e846df70-ffe8-4756-9b4b-e27612b7cd73","Type":"ContainerDied","Data":"9d9bf87aeccc49e78216f375d03d3f366c115800d64035ad7d4b388613609ef0"} Dec 01 07:00:10 crc kubenswrapper[4632]: I1201 07:00:10.386209 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.3861973069999998 podStartE2EDuration="2.386197307s" podCreationTimestamp="2025-12-01 07:00:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:00:10.382743853 +0000 UTC m=+1019.947756826" watchObservedRunningTime="2025-12-01 07:00:10.386197307 +0000 UTC m=+1019.951210271" Dec 01 07:00:10 crc kubenswrapper[4632]: I1201 07:00:10.981498 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.382216 4632 generic.go:334] "Generic (PLEG): container finished" podID="e846df70-ffe8-4756-9b4b-e27612b7cd73" 
containerID="7a5413eb7bb999a55b7021f7ccf0836de17da57b8841965e9e344a3d817f68c7" exitCode=0 Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.383065 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e846df70-ffe8-4756-9b4b-e27612b7cd73","Type":"ContainerDied","Data":"7a5413eb7bb999a55b7021f7ccf0836de17da57b8841965e9e344a3d817f68c7"} Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.664765 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.709751 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7476c47877-cskmq" Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.758496 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-88794ccbc-bcgk4"] Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.758736 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" podUID="16718254-ede5-4551-a80a-d5a80873630b" containerName="dnsmasq-dns" containerID="cri-o://6b28c9f816ee449d8b745a1b275b1f05f9df55bd85779c8188732bcbd4840b5e" gracePeriod=10 Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.768525 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-combined-ca-bundle\") pod \"e846df70-ffe8-4756-9b4b-e27612b7cd73\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.768661 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e846df70-ffe8-4756-9b4b-e27612b7cd73-run-httpd\") pod \"e846df70-ffe8-4756-9b4b-e27612b7cd73\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.768687 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-config-data\") pod \"e846df70-ffe8-4756-9b4b-e27612b7cd73\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.768729 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-ceilometer-tls-certs\") pod \"e846df70-ffe8-4756-9b4b-e27612b7cd73\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.768795 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e846df70-ffe8-4756-9b4b-e27612b7cd73-log-httpd\") pod \"e846df70-ffe8-4756-9b4b-e27612b7cd73\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.768817 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-scripts\") pod \"e846df70-ffe8-4756-9b4b-e27612b7cd73\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.768918 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9qsg\" (UniqueName: 
\"kubernetes.io/projected/e846df70-ffe8-4756-9b4b-e27612b7cd73-kube-api-access-x9qsg\") pod \"e846df70-ffe8-4756-9b4b-e27612b7cd73\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.769026 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-sg-core-conf-yaml\") pod \"e846df70-ffe8-4756-9b4b-e27612b7cd73\" (UID: \"e846df70-ffe8-4756-9b4b-e27612b7cd73\") " Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.769041 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e846df70-ffe8-4756-9b4b-e27612b7cd73-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e846df70-ffe8-4756-9b4b-e27612b7cd73" (UID: "e846df70-ffe8-4756-9b4b-e27612b7cd73"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.769348 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e846df70-ffe8-4756-9b4b-e27612b7cd73-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e846df70-ffe8-4756-9b4b-e27612b7cd73" (UID: "e846df70-ffe8-4756-9b4b-e27612b7cd73"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.771122 4632 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e846df70-ffe8-4756-9b4b-e27612b7cd73-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.771143 4632 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e846df70-ffe8-4756-9b4b-e27612b7cd73-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.776403 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-scripts" (OuterVolumeSpecName: "scripts") pod "e846df70-ffe8-4756-9b4b-e27612b7cd73" (UID: "e846df70-ffe8-4756-9b4b-e27612b7cd73"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.805719 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e846df70-ffe8-4756-9b4b-e27612b7cd73-kube-api-access-x9qsg" (OuterVolumeSpecName: "kube-api-access-x9qsg") pod "e846df70-ffe8-4756-9b4b-e27612b7cd73" (UID: "e846df70-ffe8-4756-9b4b-e27612b7cd73"). InnerVolumeSpecName "kube-api-access-x9qsg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.831604 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e846df70-ffe8-4756-9b4b-e27612b7cd73" (UID: "e846df70-ffe8-4756-9b4b-e27612b7cd73"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.837748 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "e846df70-ffe8-4756-9b4b-e27612b7cd73" (UID: "e846df70-ffe8-4756-9b4b-e27612b7cd73"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.872840 4632 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.872869 4632 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.872879 4632 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.872890 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9qsg\" (UniqueName: \"kubernetes.io/projected/e846df70-ffe8-4756-9b4b-e27612b7cd73-kube-api-access-x9qsg\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.882684 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e846df70-ffe8-4756-9b4b-e27612b7cd73" (UID: "e846df70-ffe8-4756-9b4b-e27612b7cd73"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.920523 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-config-data" (OuterVolumeSpecName: "config-data") pod "e846df70-ffe8-4756-9b4b-e27612b7cd73" (UID: "e846df70-ffe8-4756-9b4b-e27612b7cd73"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.976794 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:11 crc kubenswrapper[4632]: I1201 07:00:11.976833 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e846df70-ffe8-4756-9b4b-e27612b7cd73-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.119014 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.187205 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rj79j\" (UniqueName: \"kubernetes.io/projected/16718254-ede5-4551-a80a-d5a80873630b-kube-api-access-rj79j\") pod \"16718254-ede5-4551-a80a-d5a80873630b\" (UID: \"16718254-ede5-4551-a80a-d5a80873630b\") " Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.187324 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-dns-svc\") pod \"16718254-ede5-4551-a80a-d5a80873630b\" (UID: \"16718254-ede5-4551-a80a-d5a80873630b\") " Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.187470 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-ovsdbserver-sb\") pod \"16718254-ede5-4551-a80a-d5a80873630b\" (UID: \"16718254-ede5-4551-a80a-d5a80873630b\") " Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.187523 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-dns-swift-storage-0\") pod \"16718254-ede5-4551-a80a-d5a80873630b\" (UID: \"16718254-ede5-4551-a80a-d5a80873630b\") " Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.187640 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-ovsdbserver-nb\") pod \"16718254-ede5-4551-a80a-d5a80873630b\" (UID: \"16718254-ede5-4551-a80a-d5a80873630b\") " Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.187674 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-config\") pod \"16718254-ede5-4551-a80a-d5a80873630b\" (UID: \"16718254-ede5-4551-a80a-d5a80873630b\") " Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.192757 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16718254-ede5-4551-a80a-d5a80873630b-kube-api-access-rj79j" (OuterVolumeSpecName: "kube-api-access-rj79j") pod "16718254-ede5-4551-a80a-d5a80873630b" (UID: "16718254-ede5-4551-a80a-d5a80873630b"). InnerVolumeSpecName "kube-api-access-rj79j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.227952 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "16718254-ede5-4551-a80a-d5a80873630b" (UID: "16718254-ede5-4551-a80a-d5a80873630b"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.230028 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "16718254-ede5-4551-a80a-d5a80873630b" (UID: "16718254-ede5-4551-a80a-d5a80873630b"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.238651 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-config" (OuterVolumeSpecName: "config") pod "16718254-ede5-4551-a80a-d5a80873630b" (UID: "16718254-ede5-4551-a80a-d5a80873630b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.243407 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "16718254-ede5-4551-a80a-d5a80873630b" (UID: "16718254-ede5-4551-a80a-d5a80873630b"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.249523 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "16718254-ede5-4551-a80a-d5a80873630b" (UID: "16718254-ede5-4551-a80a-d5a80873630b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.291313 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rj79j\" (UniqueName: \"kubernetes.io/projected/16718254-ede5-4551-a80a-d5a80873630b-kube-api-access-rj79j\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.291364 4632 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.291381 4632 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.291392 4632 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.291403 4632 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.291413 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16718254-ede5-4551-a80a-d5a80873630b-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.403225 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e846df70-ffe8-4756-9b4b-e27612b7cd73","Type":"ContainerDied","Data":"46be742db54c1471b08fece272d8e2a8a186daacc49fb017be6e21bb7edbf7d7"} Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.403284 4632 scope.go:117] "RemoveContainer" containerID="02e87ce45e2c993f39de9a6871e34ea2013bb8676d68910776b5c3cbb1695f7b" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.403401 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.413247 4632 generic.go:334] "Generic (PLEG): container finished" podID="16718254-ede5-4551-a80a-d5a80873630b" containerID="6b28c9f816ee449d8b745a1b275b1f05f9df55bd85779c8188732bcbd4840b5e" exitCode=0 Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.413307 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" event={"ID":"16718254-ede5-4551-a80a-d5a80873630b","Type":"ContainerDied","Data":"6b28c9f816ee449d8b745a1b275b1f05f9df55bd85779c8188732bcbd4840b5e"} Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.413342 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" event={"ID":"16718254-ede5-4551-a80a-d5a80873630b","Type":"ContainerDied","Data":"1fbd64ca2b27c84be76c82d0a76d7e1321e0c78f919cf123b8220d341426a3ee"} Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.413507 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-88794ccbc-bcgk4" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.440481 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.442166 4632 scope.go:117] "RemoveContainer" containerID="9c6efbd9564435d3884d590bf2199d3f1d5f834c257bde7ae9b3a5f069a84f01" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.454205 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.463315 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-88794ccbc-bcgk4"] Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.469752 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-88794ccbc-bcgk4"] Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.475507 4632 scope.go:117] "RemoveContainer" containerID="9d9bf87aeccc49e78216f375d03d3f366c115800d64035ad7d4b388613609ef0" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.476263 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:00:12 crc kubenswrapper[4632]: E1201 07:00:12.476731 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16718254-ede5-4551-a80a-d5a80873630b" containerName="init" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.476745 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="16718254-ede5-4551-a80a-d5a80873630b" containerName="init" Dec 01 07:00:12 crc kubenswrapper[4632]: E1201 07:00:12.476758 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e846df70-ffe8-4756-9b4b-e27612b7cd73" containerName="proxy-httpd" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.476766 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="e846df70-ffe8-4756-9b4b-e27612b7cd73" containerName="proxy-httpd" Dec 01 07:00:12 crc kubenswrapper[4632]: E1201 07:00:12.476784 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e846df70-ffe8-4756-9b4b-e27612b7cd73" containerName="sg-core" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.476790 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="e846df70-ffe8-4756-9b4b-e27612b7cd73" containerName="sg-core" Dec 01 07:00:12 crc kubenswrapper[4632]: E1201 07:00:12.476805 4632 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="e846df70-ffe8-4756-9b4b-e27612b7cd73" containerName="ceilometer-central-agent" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.476811 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="e846df70-ffe8-4756-9b4b-e27612b7cd73" containerName="ceilometer-central-agent" Dec 01 07:00:12 crc kubenswrapper[4632]: E1201 07:00:12.476819 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e846df70-ffe8-4756-9b4b-e27612b7cd73" containerName="ceilometer-notification-agent" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.476824 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="e846df70-ffe8-4756-9b4b-e27612b7cd73" containerName="ceilometer-notification-agent" Dec 01 07:00:12 crc kubenswrapper[4632]: E1201 07:00:12.476833 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16718254-ede5-4551-a80a-d5a80873630b" containerName="dnsmasq-dns" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.476839 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="16718254-ede5-4551-a80a-d5a80873630b" containerName="dnsmasq-dns" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.477020 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="16718254-ede5-4551-a80a-d5a80873630b" containerName="dnsmasq-dns" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.477031 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="e846df70-ffe8-4756-9b4b-e27612b7cd73" containerName="sg-core" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.477044 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="e846df70-ffe8-4756-9b4b-e27612b7cd73" containerName="ceilometer-central-agent" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.477077 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="e846df70-ffe8-4756-9b4b-e27612b7cd73" containerName="proxy-httpd" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.477085 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="e846df70-ffe8-4756-9b4b-e27612b7cd73" containerName="ceilometer-notification-agent" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.479238 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.485143 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.485421 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.486025 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.495160 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.501513 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9-config-data\") pod \"ceilometer-0\" (UID: \"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9\") " pod="openstack/ceilometer-0" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.501548 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9\") " pod="openstack/ceilometer-0" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.501637 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9-log-httpd\") pod \"ceilometer-0\" (UID: \"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9\") " pod="openstack/ceilometer-0" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.501657 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9-run-httpd\") pod \"ceilometer-0\" (UID: \"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9\") " pod="openstack/ceilometer-0" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.501674 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlkz7\" (UniqueName: \"kubernetes.io/projected/1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9-kube-api-access-wlkz7\") pod \"ceilometer-0\" (UID: \"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9\") " pod="openstack/ceilometer-0" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.501695 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9\") " pod="openstack/ceilometer-0" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.501715 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9-scripts\") pod \"ceilometer-0\" (UID: \"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9\") " pod="openstack/ceilometer-0" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.501737 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9\") " pod="openstack/ceilometer-0" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.511921 4632 scope.go:117] "RemoveContainer" containerID="7a5413eb7bb999a55b7021f7ccf0836de17da57b8841965e9e344a3d817f68c7" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.529740 4632 scope.go:117] "RemoveContainer" containerID="6b28c9f816ee449d8b745a1b275b1f05f9df55bd85779c8188732bcbd4840b5e" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.545366 4632 scope.go:117] "RemoveContainer" containerID="1a374aceb207429da86d3135f72671638b1993342a61c9acdc6a9121320a98b2" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.567807 4632 scope.go:117] "RemoveContainer" containerID="6b28c9f816ee449d8b745a1b275b1f05f9df55bd85779c8188732bcbd4840b5e" Dec 01 07:00:12 crc kubenswrapper[4632]: E1201 07:00:12.571145 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b28c9f816ee449d8b745a1b275b1f05f9df55bd85779c8188732bcbd4840b5e\": container with ID starting with 6b28c9f816ee449d8b745a1b275b1f05f9df55bd85779c8188732bcbd4840b5e not found: ID does not exist" containerID="6b28c9f816ee449d8b745a1b275b1f05f9df55bd85779c8188732bcbd4840b5e" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.571181 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b28c9f816ee449d8b745a1b275b1f05f9df55bd85779c8188732bcbd4840b5e"} err="failed to get container status \"6b28c9f816ee449d8b745a1b275b1f05f9df55bd85779c8188732bcbd4840b5e\": rpc error: code = NotFound desc = could not find container \"6b28c9f816ee449d8b745a1b275b1f05f9df55bd85779c8188732bcbd4840b5e\": container with ID starting with 6b28c9f816ee449d8b745a1b275b1f05f9df55bd85779c8188732bcbd4840b5e not found: ID does not exist" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.571204 4632 scope.go:117] "RemoveContainer" containerID="1a374aceb207429da86d3135f72671638b1993342a61c9acdc6a9121320a98b2" Dec 01 07:00:12 crc kubenswrapper[4632]: E1201 07:00:12.576928 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a374aceb207429da86d3135f72671638b1993342a61c9acdc6a9121320a98b2\": container with ID starting with 1a374aceb207429da86d3135f72671638b1993342a61c9acdc6a9121320a98b2 not found: ID does not exist" containerID="1a374aceb207429da86d3135f72671638b1993342a61c9acdc6a9121320a98b2" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.576961 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a374aceb207429da86d3135f72671638b1993342a61c9acdc6a9121320a98b2"} err="failed to get container status \"1a374aceb207429da86d3135f72671638b1993342a61c9acdc6a9121320a98b2\": rpc error: code = NotFound desc = could not find container \"1a374aceb207429da86d3135f72671638b1993342a61c9acdc6a9121320a98b2\": container with ID starting with 1a374aceb207429da86d3135f72671638b1993342a61c9acdc6a9121320a98b2 not found: ID does not exist" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.604296 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9\") " pod="openstack/ceilometer-0" 
Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.604420 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9-config-data\") pod \"ceilometer-0\" (UID: \"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9\") " pod="openstack/ceilometer-0" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.604488 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9\") " pod="openstack/ceilometer-0" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.604780 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9-log-httpd\") pod \"ceilometer-0\" (UID: \"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9\") " pod="openstack/ceilometer-0" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.604817 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9-run-httpd\") pod \"ceilometer-0\" (UID: \"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9\") " pod="openstack/ceilometer-0" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.604840 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlkz7\" (UniqueName: \"kubernetes.io/projected/1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9-kube-api-access-wlkz7\") pod \"ceilometer-0\" (UID: \"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9\") " pod="openstack/ceilometer-0" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.604894 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9\") " pod="openstack/ceilometer-0" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.604935 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9-scripts\") pod \"ceilometer-0\" (UID: \"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9\") " pod="openstack/ceilometer-0" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.605204 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9-log-httpd\") pod \"ceilometer-0\" (UID: \"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9\") " pod="openstack/ceilometer-0" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.605625 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9-run-httpd\") pod \"ceilometer-0\" (UID: \"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9\") " pod="openstack/ceilometer-0" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.612183 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9\") " pod="openstack/ceilometer-0" Dec 01 07:00:12 crc kubenswrapper[4632]: 
I1201 07:00:12.614081 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9\") " pod="openstack/ceilometer-0" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.614299 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9\") " pod="openstack/ceilometer-0" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.614967 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9-scripts\") pod \"ceilometer-0\" (UID: \"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9\") " pod="openstack/ceilometer-0" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.615817 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9-config-data\") pod \"ceilometer-0\" (UID: \"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9\") " pod="openstack/ceilometer-0" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.621942 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlkz7\" (UniqueName: \"kubernetes.io/projected/1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9-kube-api-access-wlkz7\") pod \"ceilometer-0\" (UID: \"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9\") " pod="openstack/ceilometer-0" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.764574 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16718254-ede5-4551-a80a-d5a80873630b" path="/var/lib/kubelet/pods/16718254-ede5-4551-a80a-d5a80873630b/volumes" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.765529 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e846df70-ffe8-4756-9b4b-e27612b7cd73" path="/var/lib/kubelet/pods/e846df70-ffe8-4756-9b4b-e27612b7cd73/volumes" Dec 01 07:00:12 crc kubenswrapper[4632]: I1201 07:00:12.805069 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 01 07:00:13 crc kubenswrapper[4632]: I1201 07:00:13.238696 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 01 07:00:13 crc kubenswrapper[4632]: W1201 07:00:13.239369 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1c26be38_a4fd_4756_ab66_7f0ef9d8cbc9.slice/crio-5369a9574abe7ea65fa0ce7bbe07ad0a50d0e0916f4aa80facbf6d635b5c788b WatchSource:0}: Error finding container 5369a9574abe7ea65fa0ce7bbe07ad0a50d0e0916f4aa80facbf6d635b5c788b: Status 404 returned error can't find the container with id 5369a9574abe7ea65fa0ce7bbe07ad0a50d0e0916f4aa80facbf6d635b5c788b Dec 01 07:00:13 crc kubenswrapper[4632]: I1201 07:00:13.459443 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9","Type":"ContainerStarted","Data":"5369a9574abe7ea65fa0ce7bbe07ad0a50d0e0916f4aa80facbf6d635b5c788b"} Dec 01 07:00:14 crc kubenswrapper[4632]: I1201 07:00:14.473967 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9","Type":"ContainerStarted","Data":"d7b54b782229ed4765694384870bc7374fcabe1a5d4945d30f227d8dbdfb1a4b"} Dec 01 07:00:15 crc kubenswrapper[4632]: I1201 07:00:15.499155 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9","Type":"ContainerStarted","Data":"37aba74f7f9ca100eb5b657cdcb0113a280abc912a4ad163b44a0eada46269d4"} Dec 01 07:00:15 crc kubenswrapper[4632]: I1201 07:00:15.982013 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:00:16 crc kubenswrapper[4632]: I1201 07:00:16.006623 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:00:16 crc kubenswrapper[4632]: I1201 07:00:16.515972 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9","Type":"ContainerStarted","Data":"178dd258b0c023566b5cf761cf5ffe28fc2a4491353d9ce08d7431d7d14a4a9b"} Dec 01 07:00:16 crc kubenswrapper[4632]: I1201 07:00:16.545118 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 01 07:00:16 crc kubenswrapper[4632]: I1201 07:00:16.716882 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-hgt5p"] Dec 01 07:00:16 crc kubenswrapper[4632]: I1201 07:00:16.718375 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-hgt5p" Dec 01 07:00:16 crc kubenswrapper[4632]: I1201 07:00:16.721278 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 01 07:00:16 crc kubenswrapper[4632]: I1201 07:00:16.721470 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 01 07:00:16 crc kubenswrapper[4632]: I1201 07:00:16.742479 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-hgt5p"] Dec 01 07:00:16 crc kubenswrapper[4632]: I1201 07:00:16.829479 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06654851-ed77-41a9-9cea-0baaadb44227-scripts\") pod \"nova-cell1-cell-mapping-hgt5p\" (UID: \"06654851-ed77-41a9-9cea-0baaadb44227\") " pod="openstack/nova-cell1-cell-mapping-hgt5p" Dec 01 07:00:16 crc kubenswrapper[4632]: I1201 07:00:16.829545 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06654851-ed77-41a9-9cea-0baaadb44227-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-hgt5p\" (UID: \"06654851-ed77-41a9-9cea-0baaadb44227\") " pod="openstack/nova-cell1-cell-mapping-hgt5p" Dec 01 07:00:16 crc kubenswrapper[4632]: I1201 07:00:16.829746 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06654851-ed77-41a9-9cea-0baaadb44227-config-data\") pod \"nova-cell1-cell-mapping-hgt5p\" (UID: \"06654851-ed77-41a9-9cea-0baaadb44227\") " pod="openstack/nova-cell1-cell-mapping-hgt5p" Dec 01 07:00:16 crc kubenswrapper[4632]: I1201 07:00:16.829868 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72dh6\" (UniqueName: \"kubernetes.io/projected/06654851-ed77-41a9-9cea-0baaadb44227-kube-api-access-72dh6\") pod \"nova-cell1-cell-mapping-hgt5p\" (UID: \"06654851-ed77-41a9-9cea-0baaadb44227\") " pod="openstack/nova-cell1-cell-mapping-hgt5p" Dec 01 07:00:16 crc kubenswrapper[4632]: I1201 07:00:16.931772 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06654851-ed77-41a9-9cea-0baaadb44227-config-data\") pod \"nova-cell1-cell-mapping-hgt5p\" (UID: \"06654851-ed77-41a9-9cea-0baaadb44227\") " pod="openstack/nova-cell1-cell-mapping-hgt5p" Dec 01 07:00:16 crc kubenswrapper[4632]: I1201 07:00:16.931887 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72dh6\" (UniqueName: \"kubernetes.io/projected/06654851-ed77-41a9-9cea-0baaadb44227-kube-api-access-72dh6\") pod \"nova-cell1-cell-mapping-hgt5p\" (UID: \"06654851-ed77-41a9-9cea-0baaadb44227\") " pod="openstack/nova-cell1-cell-mapping-hgt5p" Dec 01 07:00:16 crc kubenswrapper[4632]: I1201 07:00:16.931985 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06654851-ed77-41a9-9cea-0baaadb44227-scripts\") pod \"nova-cell1-cell-mapping-hgt5p\" (UID: \"06654851-ed77-41a9-9cea-0baaadb44227\") " pod="openstack/nova-cell1-cell-mapping-hgt5p" Dec 01 07:00:16 crc kubenswrapper[4632]: I1201 07:00:16.932020 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/06654851-ed77-41a9-9cea-0baaadb44227-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-hgt5p\" (UID: \"06654851-ed77-41a9-9cea-0baaadb44227\") " pod="openstack/nova-cell1-cell-mapping-hgt5p" Dec 01 07:00:16 crc kubenswrapper[4632]: I1201 07:00:16.941956 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06654851-ed77-41a9-9cea-0baaadb44227-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-hgt5p\" (UID: \"06654851-ed77-41a9-9cea-0baaadb44227\") " pod="openstack/nova-cell1-cell-mapping-hgt5p" Dec 01 07:00:16 crc kubenswrapper[4632]: I1201 07:00:16.956819 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06654851-ed77-41a9-9cea-0baaadb44227-scripts\") pod \"nova-cell1-cell-mapping-hgt5p\" (UID: \"06654851-ed77-41a9-9cea-0baaadb44227\") " pod="openstack/nova-cell1-cell-mapping-hgt5p" Dec 01 07:00:16 crc kubenswrapper[4632]: I1201 07:00:16.957310 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06654851-ed77-41a9-9cea-0baaadb44227-config-data\") pod \"nova-cell1-cell-mapping-hgt5p\" (UID: \"06654851-ed77-41a9-9cea-0baaadb44227\") " pod="openstack/nova-cell1-cell-mapping-hgt5p" Dec 01 07:00:16 crc kubenswrapper[4632]: I1201 07:00:16.959034 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72dh6\" (UniqueName: \"kubernetes.io/projected/06654851-ed77-41a9-9cea-0baaadb44227-kube-api-access-72dh6\") pod \"nova-cell1-cell-mapping-hgt5p\" (UID: \"06654851-ed77-41a9-9cea-0baaadb44227\") " pod="openstack/nova-cell1-cell-mapping-hgt5p" Dec 01 07:00:17 crc kubenswrapper[4632]: I1201 07:00:17.037397 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-hgt5p" Dec 01 07:00:17 crc kubenswrapper[4632]: I1201 07:00:17.486172 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-hgt5p"] Dec 01 07:00:17 crc kubenswrapper[4632]: I1201 07:00:17.545503 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-hgt5p" event={"ID":"06654851-ed77-41a9-9cea-0baaadb44227","Type":"ContainerStarted","Data":"08448162c8035914aab0b67bd5d7ad4a73ee1fd77fc7b76e954ab80cf5e6cc4a"} Dec 01 07:00:18 crc kubenswrapper[4632]: I1201 07:00:18.557882 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-hgt5p" event={"ID":"06654851-ed77-41a9-9cea-0baaadb44227","Type":"ContainerStarted","Data":"af011fdcc774b7f78997b6e480cdcf8b6eaaac4b4b53410b3a480141b4f81314"} Dec 01 07:00:18 crc kubenswrapper[4632]: I1201 07:00:18.560922 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9","Type":"ContainerStarted","Data":"357f8a24c412fc6b111afa288a5f89f1e1710e28de5f61f8d4e52e891253a16a"} Dec 01 07:00:18 crc kubenswrapper[4632]: I1201 07:00:18.561170 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 01 07:00:18 crc kubenswrapper[4632]: I1201 07:00:18.579253 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-hgt5p" podStartSLOduration=2.579235748 podStartE2EDuration="2.579235748s" podCreationTimestamp="2025-12-01 07:00:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:00:18.572277522 +0000 UTC m=+1028.137290495" watchObservedRunningTime="2025-12-01 07:00:18.579235748 +0000 UTC m=+1028.144248720" Dec 01 07:00:18 crc kubenswrapper[4632]: I1201 07:00:18.613151 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.252804968 podStartE2EDuration="6.613129574s" podCreationTimestamp="2025-12-01 07:00:12 +0000 UTC" firstStartedPulling="2025-12-01 07:00:13.242635018 +0000 UTC m=+1022.807647990" lastFinishedPulling="2025-12-01 07:00:17.602959623 +0000 UTC m=+1027.167972596" observedRunningTime="2025-12-01 07:00:18.594708315 +0000 UTC m=+1028.159721298" watchObservedRunningTime="2025-12-01 07:00:18.613129574 +0000 UTC m=+1028.178142548" Dec 01 07:00:18 crc kubenswrapper[4632]: I1201 07:00:18.708829 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 01 07:00:18 crc kubenswrapper[4632]: I1201 07:00:18.708913 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 01 07:00:19 crc kubenswrapper[4632]: I1201 07:00:19.728565 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="56716ad7-406c-49f8-bbc8-4ba990861e48" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.196:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 01 07:00:19 crc kubenswrapper[4632]: I1201 07:00:19.728586 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="56716ad7-406c-49f8-bbc8-4ba990861e48" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.196:8774/\": net/http: request canceled (Client.Timeout exceeded while 
awaiting headers)" Dec 01 07:00:22 crc kubenswrapper[4632]: I1201 07:00:22.605375 4632 generic.go:334] "Generic (PLEG): container finished" podID="06654851-ed77-41a9-9cea-0baaadb44227" containerID="af011fdcc774b7f78997b6e480cdcf8b6eaaac4b4b53410b3a480141b4f81314" exitCode=0 Dec 01 07:00:22 crc kubenswrapper[4632]: I1201 07:00:22.605517 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-hgt5p" event={"ID":"06654851-ed77-41a9-9cea-0baaadb44227","Type":"ContainerDied","Data":"af011fdcc774b7f78997b6e480cdcf8b6eaaac4b4b53410b3a480141b4f81314"} Dec 01 07:00:23 crc kubenswrapper[4632]: I1201 07:00:23.942664 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-hgt5p" Dec 01 07:00:24 crc kubenswrapper[4632]: I1201 07:00:24.082927 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06654851-ed77-41a9-9cea-0baaadb44227-config-data\") pod \"06654851-ed77-41a9-9cea-0baaadb44227\" (UID: \"06654851-ed77-41a9-9cea-0baaadb44227\") " Dec 01 07:00:24 crc kubenswrapper[4632]: I1201 07:00:24.083053 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-72dh6\" (UniqueName: \"kubernetes.io/projected/06654851-ed77-41a9-9cea-0baaadb44227-kube-api-access-72dh6\") pod \"06654851-ed77-41a9-9cea-0baaadb44227\" (UID: \"06654851-ed77-41a9-9cea-0baaadb44227\") " Dec 01 07:00:24 crc kubenswrapper[4632]: I1201 07:00:24.083085 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06654851-ed77-41a9-9cea-0baaadb44227-combined-ca-bundle\") pod \"06654851-ed77-41a9-9cea-0baaadb44227\" (UID: \"06654851-ed77-41a9-9cea-0baaadb44227\") " Dec 01 07:00:24 crc kubenswrapper[4632]: I1201 07:00:24.083401 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06654851-ed77-41a9-9cea-0baaadb44227-scripts\") pod \"06654851-ed77-41a9-9cea-0baaadb44227\" (UID: \"06654851-ed77-41a9-9cea-0baaadb44227\") " Dec 01 07:00:24 crc kubenswrapper[4632]: I1201 07:00:24.090203 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06654851-ed77-41a9-9cea-0baaadb44227-scripts" (OuterVolumeSpecName: "scripts") pod "06654851-ed77-41a9-9cea-0baaadb44227" (UID: "06654851-ed77-41a9-9cea-0baaadb44227"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:00:24 crc kubenswrapper[4632]: I1201 07:00:24.090406 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06654851-ed77-41a9-9cea-0baaadb44227-kube-api-access-72dh6" (OuterVolumeSpecName: "kube-api-access-72dh6") pod "06654851-ed77-41a9-9cea-0baaadb44227" (UID: "06654851-ed77-41a9-9cea-0baaadb44227"). InnerVolumeSpecName "kube-api-access-72dh6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:00:24 crc kubenswrapper[4632]: I1201 07:00:24.110838 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06654851-ed77-41a9-9cea-0baaadb44227-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "06654851-ed77-41a9-9cea-0baaadb44227" (UID: "06654851-ed77-41a9-9cea-0baaadb44227"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:00:24 crc kubenswrapper[4632]: I1201 07:00:24.111835 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06654851-ed77-41a9-9cea-0baaadb44227-config-data" (OuterVolumeSpecName: "config-data") pod "06654851-ed77-41a9-9cea-0baaadb44227" (UID: "06654851-ed77-41a9-9cea-0baaadb44227"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:00:24 crc kubenswrapper[4632]: I1201 07:00:24.187123 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06654851-ed77-41a9-9cea-0baaadb44227-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:24 crc kubenswrapper[4632]: I1201 07:00:24.187181 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-72dh6\" (UniqueName: \"kubernetes.io/projected/06654851-ed77-41a9-9cea-0baaadb44227-kube-api-access-72dh6\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:24 crc kubenswrapper[4632]: I1201 07:00:24.187198 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06654851-ed77-41a9-9cea-0baaadb44227-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:24 crc kubenswrapper[4632]: I1201 07:00:24.187209 4632 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06654851-ed77-41a9-9cea-0baaadb44227-scripts\") on node \"crc\" DevicePath \"\"" Dec 01 07:00:24 crc kubenswrapper[4632]: I1201 07:00:24.631499 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-hgt5p" event={"ID":"06654851-ed77-41a9-9cea-0baaadb44227","Type":"ContainerDied","Data":"08448162c8035914aab0b67bd5d7ad4a73ee1fd77fc7b76e954ab80cf5e6cc4a"} Dec 01 07:00:24 crc kubenswrapper[4632]: I1201 07:00:24.631815 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="08448162c8035914aab0b67bd5d7ad4a73ee1fd77fc7b76e954ab80cf5e6cc4a" Dec 01 07:00:24 crc kubenswrapper[4632]: I1201 07:00:24.631590 4632 util.go:48] "No ready sandbox for pod can be found. 
Dec 01 07:00:24 crc kubenswrapper[4632]: I1201 07:00:24.807702 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 01 07:00:24 crc kubenswrapper[4632]: I1201 07:00:24.807929 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="56716ad7-406c-49f8-bbc8-4ba990861e48" containerName="nova-api-log" containerID="cri-o://0658b9380c8dea41fac6f5c33575cbf30ed5fd2ef81dea8bf6d3489a0c70df49" gracePeriod=30
Dec 01 07:00:24 crc kubenswrapper[4632]: I1201 07:00:24.808079 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="56716ad7-406c-49f8-bbc8-4ba990861e48" containerName="nova-api-api" containerID="cri-o://25d46ddcca3dddaad82d49d7150321cf1863ebbaa8010769e572b761c55600d7" gracePeriod=30
Dec 01 07:00:24 crc kubenswrapper[4632]: I1201 07:00:24.827636 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 01 07:00:24 crc kubenswrapper[4632]: I1201 07:00:24.827882 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="d43821ac-9a83-469d-9827-4f7b9d0d42c1" containerName="nova-scheduler-scheduler" containerID="cri-o://fe8be1876445e33ea1df74144b0acf9f71db8570eaca206e0489f7047eed8b1f" gracePeriod=30
Dec 01 07:00:24 crc kubenswrapper[4632]: I1201 07:00:24.850734 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 01 07:00:24 crc kubenswrapper[4632]: I1201 07:00:24.851087 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="00425bd3-e9db-4a5b-a064-9c75692cf6c1" containerName="nova-metadata-log" containerID="cri-o://43753ea9160bc87d18dd0a1eda2c3531c4388a1974d3fd7de30002df7b3cbab9" gracePeriod=30
Dec 01 07:00:24 crc kubenswrapper[4632]: I1201 07:00:24.851136 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="00425bd3-e9db-4a5b-a064-9c75692cf6c1" containerName="nova-metadata-metadata" containerID="cri-o://bd3608b72fa277b4bad3bd4dd2e5f229ecd355ecc70e1182f259a63975a24221" gracePeriod=30
Dec 01 07:00:25 crc kubenswrapper[4632]: I1201 07:00:25.641545 4632 generic.go:334] "Generic (PLEG): container finished" podID="56716ad7-406c-49f8-bbc8-4ba990861e48" containerID="0658b9380c8dea41fac6f5c33575cbf30ed5fd2ef81dea8bf6d3489a0c70df49" exitCode=143
Dec 01 07:00:25 crc kubenswrapper[4632]: I1201 07:00:25.641625 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"56716ad7-406c-49f8-bbc8-4ba990861e48","Type":"ContainerDied","Data":"0658b9380c8dea41fac6f5c33575cbf30ed5fd2ef81dea8bf6d3489a0c70df49"}
Dec 01 07:00:25 crc kubenswrapper[4632]: I1201 07:00:25.643576 4632 generic.go:334] "Generic (PLEG): container finished" podID="00425bd3-e9db-4a5b-a064-9c75692cf6c1" containerID="43753ea9160bc87d18dd0a1eda2c3531c4388a1974d3fd7de30002df7b3cbab9" exitCode=143
Dec 01 07:00:25 crc kubenswrapper[4632]: I1201 07:00:25.643616 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"00425bd3-e9db-4a5b-a064-9c75692cf6c1","Type":"ContainerDied","Data":"43753ea9160bc87d18dd0a1eda2c3531c4388a1974d3fd7de30002df7b3cbab9"}
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.062338 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.232876 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bkggz\" (UniqueName: \"kubernetes.io/projected/d43821ac-9a83-469d-9827-4f7b9d0d42c1-kube-api-access-bkggz\") pod \"d43821ac-9a83-469d-9827-4f7b9d0d42c1\" (UID: \"d43821ac-9a83-469d-9827-4f7b9d0d42c1\") "
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.233056 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d43821ac-9a83-469d-9827-4f7b9d0d42c1-config-data\") pod \"d43821ac-9a83-469d-9827-4f7b9d0d42c1\" (UID: \"d43821ac-9a83-469d-9827-4f7b9d0d42c1\") "
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.233234 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d43821ac-9a83-469d-9827-4f7b9d0d42c1-combined-ca-bundle\") pod \"d43821ac-9a83-469d-9827-4f7b9d0d42c1\" (UID: \"d43821ac-9a83-469d-9827-4f7b9d0d42c1\") "
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.240020 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d43821ac-9a83-469d-9827-4f7b9d0d42c1-kube-api-access-bkggz" (OuterVolumeSpecName: "kube-api-access-bkggz") pod "d43821ac-9a83-469d-9827-4f7b9d0d42c1" (UID: "d43821ac-9a83-469d-9827-4f7b9d0d42c1"). InnerVolumeSpecName "kube-api-access-bkggz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.259018 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d43821ac-9a83-469d-9827-4f7b9d0d42c1-config-data" (OuterVolumeSpecName: "config-data") pod "d43821ac-9a83-469d-9827-4f7b9d0d42c1" (UID: "d43821ac-9a83-469d-9827-4f7b9d0d42c1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.260173 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d43821ac-9a83-469d-9827-4f7b9d0d42c1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d43821ac-9a83-469d-9827-4f7b9d0d42c1" (UID: "d43821ac-9a83-469d-9827-4f7b9d0d42c1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.335835 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d43821ac-9a83-469d-9827-4f7b9d0d42c1-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.335868 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bkggz\" (UniqueName: \"kubernetes.io/projected/d43821ac-9a83-469d-9827-4f7b9d0d42c1-kube-api-access-bkggz\") on node \"crc\" DevicePath \"\""
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.335882 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d43821ac-9a83-469d-9827-4f7b9d0d42c1-config-data\") on node \"crc\" DevicePath \"\""
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.656821 4632 generic.go:334] "Generic (PLEG): container finished" podID="d43821ac-9a83-469d-9827-4f7b9d0d42c1" containerID="fe8be1876445e33ea1df74144b0acf9f71db8570eaca206e0489f7047eed8b1f" exitCode=0
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.656896 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.656922 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d43821ac-9a83-469d-9827-4f7b9d0d42c1","Type":"ContainerDied","Data":"fe8be1876445e33ea1df74144b0acf9f71db8570eaca206e0489f7047eed8b1f"}
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.656996 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d43821ac-9a83-469d-9827-4f7b9d0d42c1","Type":"ContainerDied","Data":"4ea9bf33f0c9105371c61ba53e38bb848ab8a821f98699df17dde6c0645cdd6b"}
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.657020 4632 scope.go:117] "RemoveContainer" containerID="fe8be1876445e33ea1df74144b0acf9f71db8570eaca206e0489f7047eed8b1f"
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.694073 4632 scope.go:117] "RemoveContainer" containerID="fe8be1876445e33ea1df74144b0acf9f71db8570eaca206e0489f7047eed8b1f"
Dec 01 07:00:26 crc kubenswrapper[4632]: E1201 07:00:26.694622 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe8be1876445e33ea1df74144b0acf9f71db8570eaca206e0489f7047eed8b1f\": container with ID starting with fe8be1876445e33ea1df74144b0acf9f71db8570eaca206e0489f7047eed8b1f not found: ID does not exist" containerID="fe8be1876445e33ea1df74144b0acf9f71db8570eaca206e0489f7047eed8b1f"
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.694694 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe8be1876445e33ea1df74144b0acf9f71db8570eaca206e0489f7047eed8b1f"} err="failed to get container status \"fe8be1876445e33ea1df74144b0acf9f71db8570eaca206e0489f7047eed8b1f\": rpc error: code = NotFound desc = could not find container \"fe8be1876445e33ea1df74144b0acf9f71db8570eaca206e0489f7047eed8b1f\": container with ID starting with fe8be1876445e33ea1df74144b0acf9f71db8570eaca206e0489f7047eed8b1f not found: ID does not exist"
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.699155 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.713830 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.725403 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Dec 01 07:00:26 crc kubenswrapper[4632]: E1201 07:00:26.725877 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06654851-ed77-41a9-9cea-0baaadb44227" containerName="nova-manage"
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.725899 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="06654851-ed77-41a9-9cea-0baaadb44227" containerName="nova-manage"
Dec 01 07:00:26 crc kubenswrapper[4632]: E1201 07:00:26.725916 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d43821ac-9a83-469d-9827-4f7b9d0d42c1" containerName="nova-scheduler-scheduler"
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.725924 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="d43821ac-9a83-469d-9827-4f7b9d0d42c1" containerName="nova-scheduler-scheduler"
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.726115 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="d43821ac-9a83-469d-9827-4f7b9d0d42c1" containerName="nova-scheduler-scheduler"
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.726142 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="06654851-ed77-41a9-9cea-0baaadb44227" containerName="nova-manage"
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.726843 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.729007 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.732603 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.759174 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d43821ac-9a83-469d-9827-4f7b9d0d42c1" path="/var/lib/kubelet/pods/d43821ac-9a83-469d-9827-4f7b9d0d42c1/volumes"
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.845992 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec63fa13-7856-421c-ab7f-7281a42a6d67-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ec63fa13-7856-421c-ab7f-7281a42a6d67\") " pod="openstack/nova-scheduler-0"
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.846169 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mndb\" (UniqueName: \"kubernetes.io/projected/ec63fa13-7856-421c-ab7f-7281a42a6d67-kube-api-access-7mndb\") pod \"nova-scheduler-0\" (UID: \"ec63fa13-7856-421c-ab7f-7281a42a6d67\") " pod="openstack/nova-scheduler-0"
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.846378 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec63fa13-7856-421c-ab7f-7281a42a6d67-config-data\") pod \"nova-scheduler-0\" (UID: \"ec63fa13-7856-421c-ab7f-7281a42a6d67\") " pod="openstack/nova-scheduler-0"
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.948929 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec63fa13-7856-421c-ab7f-7281a42a6d67-config-data\") pod \"nova-scheduler-0\" (UID: \"ec63fa13-7856-421c-ab7f-7281a42a6d67\") " pod="openstack/nova-scheduler-0"
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.949104 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec63fa13-7856-421c-ab7f-7281a42a6d67-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ec63fa13-7856-421c-ab7f-7281a42a6d67\") " pod="openstack/nova-scheduler-0"
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.949413 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mndb\" (UniqueName: \"kubernetes.io/projected/ec63fa13-7856-421c-ab7f-7281a42a6d67-kube-api-access-7mndb\") pod \"nova-scheduler-0\" (UID: \"ec63fa13-7856-421c-ab7f-7281a42a6d67\") " pod="openstack/nova-scheduler-0"
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.956656 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec63fa13-7856-421c-ab7f-7281a42a6d67-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ec63fa13-7856-421c-ab7f-7281a42a6d67\") " pod="openstack/nova-scheduler-0"
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.957108 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec63fa13-7856-421c-ab7f-7281a42a6d67-config-data\") pod \"nova-scheduler-0\" (UID: \"ec63fa13-7856-421c-ab7f-7281a42a6d67\") " pod="openstack/nova-scheduler-0"
Dec 01 07:00:26 crc kubenswrapper[4632]: I1201 07:00:26.964329 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mndb\" (UniqueName: \"kubernetes.io/projected/ec63fa13-7856-421c-ab7f-7281a42a6d67-kube-api-access-7mndb\") pod \"nova-scheduler-0\" (UID: \"ec63fa13-7856-421c-ab7f-7281a42a6d67\") " pod="openstack/nova-scheduler-0"
Dec 01 07:00:27 crc kubenswrapper[4632]: I1201 07:00:27.041363 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 01 07:00:27 crc kubenswrapper[4632]: I1201 07:00:27.459788 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 01 07:00:27 crc kubenswrapper[4632]: W1201 07:00:27.471087 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podec63fa13_7856_421c_ab7f_7281a42a6d67.slice/crio-eb68787b75e3fefe9ac9d89103ca17703717a830f4e954557adf461263958eb8 WatchSource:0}: Error finding container eb68787b75e3fefe9ac9d89103ca17703717a830f4e954557adf461263958eb8: Status 404 returned error can't find the container with id eb68787b75e3fefe9ac9d89103ca17703717a830f4e954557adf461263958eb8
Dec 01 07:00:27 crc kubenswrapper[4632]: I1201 07:00:27.667378 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"ec63fa13-7856-421c-ab7f-7281a42a6d67","Type":"ContainerStarted","Data":"db9dbf959a774aa91cd2afacaef2d936b9bad54de5ef0b3fa13ba2a1ae86cee3"}
Dec 01 07:00:27 crc kubenswrapper[4632]: I1201 07:00:27.667428 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"ec63fa13-7856-421c-ab7f-7281a42a6d67","Type":"ContainerStarted","Data":"eb68787b75e3fefe9ac9d89103ca17703717a830f4e954557adf461263958eb8"}
Dec 01 07:00:27 crc kubenswrapper[4632]: I1201 07:00:27.941389 4632 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="00425bd3-e9db-4a5b-a064-9c75692cf6c1" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.190:8775/\": read tcp 10.217.0.2:52828->10.217.0.190:8775: read: connection reset by peer"
Dec 01 07:00:27 crc kubenswrapper[4632]: I1201 07:00:27.941426 4632 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="00425bd3-e9db-4a5b-a064-9c75692cf6c1" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.190:8775/\": read tcp 10.217.0.2:52834->10.217.0.190:8775: read: connection reset by peer"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.339877 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.372726 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.372711873 podStartE2EDuration="2.372711873s" podCreationTimestamp="2025-12-01 07:00:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:00:27.682700106 +0000 UTC m=+1037.247713079" watchObservedRunningTime="2025-12-01 07:00:28.372711873 +0000 UTC m=+1037.937724847"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.381041 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00425bd3-e9db-4a5b-a064-9c75692cf6c1-combined-ca-bundle\") pod \"00425bd3-e9db-4a5b-a064-9c75692cf6c1\" (UID: \"00425bd3-e9db-4a5b-a064-9c75692cf6c1\") "
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.381176 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4dsbg\" (UniqueName: \"kubernetes.io/projected/00425bd3-e9db-4a5b-a064-9c75692cf6c1-kube-api-access-4dsbg\") pod \"00425bd3-e9db-4a5b-a064-9c75692cf6c1\" (UID: \"00425bd3-e9db-4a5b-a064-9c75692cf6c1\") "
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.381211 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/00425bd3-e9db-4a5b-a064-9c75692cf6c1-nova-metadata-tls-certs\") pod \"00425bd3-e9db-4a5b-a064-9c75692cf6c1\" (UID: \"00425bd3-e9db-4a5b-a064-9c75692cf6c1\") "
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.381263 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/00425bd3-e9db-4a5b-a064-9c75692cf6c1-logs\") pod \"00425bd3-e9db-4a5b-a064-9c75692cf6c1\" (UID: \"00425bd3-e9db-4a5b-a064-9c75692cf6c1\") "
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.381283 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00425bd3-e9db-4a5b-a064-9c75692cf6c1-config-data\") pod \"00425bd3-e9db-4a5b-a064-9c75692cf6c1\" (UID: \"00425bd3-e9db-4a5b-a064-9c75692cf6c1\") "
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.382240 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/00425bd3-e9db-4a5b-a064-9c75692cf6c1-logs" (OuterVolumeSpecName: "logs") pod "00425bd3-e9db-4a5b-a064-9c75692cf6c1" (UID: "00425bd3-e9db-4a5b-a064-9c75692cf6c1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.406384 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00425bd3-e9db-4a5b-a064-9c75692cf6c1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "00425bd3-e9db-4a5b-a064-9c75692cf6c1" (UID: "00425bd3-e9db-4a5b-a064-9c75692cf6c1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.406424 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00425bd3-e9db-4a5b-a064-9c75692cf6c1-config-data" (OuterVolumeSpecName: "config-data") pod "00425bd3-e9db-4a5b-a064-9c75692cf6c1" (UID: "00425bd3-e9db-4a5b-a064-9c75692cf6c1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.412215 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00425bd3-e9db-4a5b-a064-9c75692cf6c1-kube-api-access-4dsbg" (OuterVolumeSpecName: "kube-api-access-4dsbg") pod "00425bd3-e9db-4a5b-a064-9c75692cf6c1" (UID: "00425bd3-e9db-4a5b-a064-9c75692cf6c1"). InnerVolumeSpecName "kube-api-access-4dsbg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.446531 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00425bd3-e9db-4a5b-a064-9c75692cf6c1-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "00425bd3-e9db-4a5b-a064-9c75692cf6c1" (UID: "00425bd3-e9db-4a5b-a064-9c75692cf6c1"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.483496 4632 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/00425bd3-e9db-4a5b-a064-9c75692cf6c1-logs\") on node \"crc\" DevicePath \"\""
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.483518 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00425bd3-e9db-4a5b-a064-9c75692cf6c1-config-data\") on node \"crc\" DevicePath \"\""
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.483528 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00425bd3-e9db-4a5b-a064-9c75692cf6c1-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.483539 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4dsbg\" (UniqueName: \"kubernetes.io/projected/00425bd3-e9db-4a5b-a064-9c75692cf6c1-kube-api-access-4dsbg\") on node \"crc\" DevicePath \"\""
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.483548 4632 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/00425bd3-e9db-4a5b-a064-9c75692cf6c1-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.705740 4632 generic.go:334] "Generic (PLEG): container finished" podID="00425bd3-e9db-4a5b-a064-9c75692cf6c1" containerID="bd3608b72fa277b4bad3bd4dd2e5f229ecd355ecc70e1182f259a63975a24221" exitCode=0
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.705835 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"00425bd3-e9db-4a5b-a064-9c75692cf6c1","Type":"ContainerDied","Data":"bd3608b72fa277b4bad3bd4dd2e5f229ecd355ecc70e1182f259a63975a24221"}
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.705868 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"00425bd3-e9db-4a5b-a064-9c75692cf6c1","Type":"ContainerDied","Data":"dd18d0901f5d9df5047e0880eba834d2772354262890c0dc6cd80c064b87a40e"}
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.705889 4632 scope.go:117] "RemoveContainer" containerID="bd3608b72fa277b4bad3bd4dd2e5f229ecd355ecc70e1182f259a63975a24221"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.706081 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.717132 4632 generic.go:334] "Generic (PLEG): container finished" podID="56716ad7-406c-49f8-bbc8-4ba990861e48" containerID="25d46ddcca3dddaad82d49d7150321cf1863ebbaa8010769e572b761c55600d7" exitCode=0
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.719546 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"56716ad7-406c-49f8-bbc8-4ba990861e48","Type":"ContainerDied","Data":"25d46ddcca3dddaad82d49d7150321cf1863ebbaa8010769e572b761c55600d7"}
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.732852 4632 scope.go:117] "RemoveContainer" containerID="43753ea9160bc87d18dd0a1eda2c3531c4388a1974d3fd7de30002df7b3cbab9"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.772515 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.778862 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.783171 4632 scope.go:117] "RemoveContainer" containerID="bd3608b72fa277b4bad3bd4dd2e5f229ecd355ecc70e1182f259a63975a24221"
Dec 01 07:00:28 crc kubenswrapper[4632]: E1201 07:00:28.783587 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd3608b72fa277b4bad3bd4dd2e5f229ecd355ecc70e1182f259a63975a24221\": container with ID starting with bd3608b72fa277b4bad3bd4dd2e5f229ecd355ecc70e1182f259a63975a24221 not found: ID does not exist" containerID="bd3608b72fa277b4bad3bd4dd2e5f229ecd355ecc70e1182f259a63975a24221"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.783630 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd3608b72fa277b4bad3bd4dd2e5f229ecd355ecc70e1182f259a63975a24221"} err="failed to get container status \"bd3608b72fa277b4bad3bd4dd2e5f229ecd355ecc70e1182f259a63975a24221\": rpc error: code = NotFound desc = could not find container \"bd3608b72fa277b4bad3bd4dd2e5f229ecd355ecc70e1182f259a63975a24221\": container with ID starting with bd3608b72fa277b4bad3bd4dd2e5f229ecd355ecc70e1182f259a63975a24221 not found: ID does not exist"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.783655 4632 scope.go:117] "RemoveContainer" containerID="43753ea9160bc87d18dd0a1eda2c3531c4388a1974d3fd7de30002df7b3cbab9"
Dec 01 07:00:28 crc kubenswrapper[4632]: E1201 07:00:28.783889 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43753ea9160bc87d18dd0a1eda2c3531c4388a1974d3fd7de30002df7b3cbab9\": container with ID starting with 43753ea9160bc87d18dd0a1eda2c3531c4388a1974d3fd7de30002df7b3cbab9 not found: ID does not exist" containerID="43753ea9160bc87d18dd0a1eda2c3531c4388a1974d3fd7de30002df7b3cbab9"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.783913 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43753ea9160bc87d18dd0a1eda2c3531c4388a1974d3fd7de30002df7b3cbab9"} err="failed to get container status \"43753ea9160bc87d18dd0a1eda2c3531c4388a1974d3fd7de30002df7b3cbab9\": rpc error: code = NotFound desc = could not find container \"43753ea9160bc87d18dd0a1eda2c3531c4388a1974d3fd7de30002df7b3cbab9\": container with ID starting with 43753ea9160bc87d18dd0a1eda2c3531c4388a1974d3fd7de30002df7b3cbab9 not found: ID does not exist"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.784538 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.795504 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Dec 01 07:00:28 crc kubenswrapper[4632]: E1201 07:00:28.796218 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56716ad7-406c-49f8-bbc8-4ba990861e48" containerName="nova-api-log"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.796241 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="56716ad7-406c-49f8-bbc8-4ba990861e48" containerName="nova-api-log"
Dec 01 07:00:28 crc kubenswrapper[4632]: E1201 07:00:28.796267 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00425bd3-e9db-4a5b-a064-9c75692cf6c1" containerName="nova-metadata-log"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.796275 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="00425bd3-e9db-4a5b-a064-9c75692cf6c1" containerName="nova-metadata-log"
Dec 01 07:00:28 crc kubenswrapper[4632]: E1201 07:00:28.796403 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00425bd3-e9db-4a5b-a064-9c75692cf6c1" containerName="nova-metadata-metadata"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.796419 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="00425bd3-e9db-4a5b-a064-9c75692cf6c1" containerName="nova-metadata-metadata"
Dec 01 07:00:28 crc kubenswrapper[4632]: E1201 07:00:28.796468 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56716ad7-406c-49f8-bbc8-4ba990861e48" containerName="nova-api-api"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.796483 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="56716ad7-406c-49f8-bbc8-4ba990861e48" containerName="nova-api-api"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.796783 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="00425bd3-e9db-4a5b-a064-9c75692cf6c1" containerName="nova-metadata-metadata"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.796811 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="56716ad7-406c-49f8-bbc8-4ba990861e48" containerName="nova-api-log"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.796845 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="00425bd3-e9db-4a5b-a064-9c75692cf6c1" containerName="nova-metadata-log"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.796861 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="56716ad7-406c-49f8-bbc8-4ba990861e48" containerName="nova-api-api"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.800688 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.805237 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.805596 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.806399 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.907049 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56716ad7-406c-49f8-bbc8-4ba990861e48-config-data\") pod \"56716ad7-406c-49f8-bbc8-4ba990861e48\" (UID: \"56716ad7-406c-49f8-bbc8-4ba990861e48\") "
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.907179 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56716ad7-406c-49f8-bbc8-4ba990861e48-combined-ca-bundle\") pod \"56716ad7-406c-49f8-bbc8-4ba990861e48\" (UID: \"56716ad7-406c-49f8-bbc8-4ba990861e48\") "
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.907314 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c2kpv\" (UniqueName: \"kubernetes.io/projected/56716ad7-406c-49f8-bbc8-4ba990861e48-kube-api-access-c2kpv\") pod \"56716ad7-406c-49f8-bbc8-4ba990861e48\" (UID: \"56716ad7-406c-49f8-bbc8-4ba990861e48\") "
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.907542 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56716ad7-406c-49f8-bbc8-4ba990861e48-logs\") pod \"56716ad7-406c-49f8-bbc8-4ba990861e48\" (UID: \"56716ad7-406c-49f8-bbc8-4ba990861e48\") "
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.907637 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56716ad7-406c-49f8-bbc8-4ba990861e48-public-tls-certs\") pod \"56716ad7-406c-49f8-bbc8-4ba990861e48\" (UID: \"56716ad7-406c-49f8-bbc8-4ba990861e48\") "
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.907728 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/56716ad7-406c-49f8-bbc8-4ba990861e48-internal-tls-certs\") pod \"56716ad7-406c-49f8-bbc8-4ba990861e48\" (UID: \"56716ad7-406c-49f8-bbc8-4ba990861e48\") "
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.908498 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56716ad7-406c-49f8-bbc8-4ba990861e48-logs" (OuterVolumeSpecName: "logs") pod "56716ad7-406c-49f8-bbc8-4ba990861e48" (UID: "56716ad7-406c-49f8-bbc8-4ba990861e48"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.908702 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8beb9f92-1512-4843-a060-e7407372d147-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8beb9f92-1512-4843-a060-e7407372d147\") " pod="openstack/nova-metadata-0"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.908796 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8beb9f92-1512-4843-a060-e7407372d147-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"8beb9f92-1512-4843-a060-e7407372d147\") " pod="openstack/nova-metadata-0"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.908886 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5q2w\" (UniqueName: \"kubernetes.io/projected/8beb9f92-1512-4843-a060-e7407372d147-kube-api-access-g5q2w\") pod \"nova-metadata-0\" (UID: \"8beb9f92-1512-4843-a060-e7407372d147\") " pod="openstack/nova-metadata-0"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.909033 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8beb9f92-1512-4843-a060-e7407372d147-config-data\") pod \"nova-metadata-0\" (UID: \"8beb9f92-1512-4843-a060-e7407372d147\") " pod="openstack/nova-metadata-0"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.909601 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8beb9f92-1512-4843-a060-e7407372d147-logs\") pod \"nova-metadata-0\" (UID: \"8beb9f92-1512-4843-a060-e7407372d147\") " pod="openstack/nova-metadata-0"
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.910020 4632 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56716ad7-406c-49f8-bbc8-4ba990861e48-logs\") on node \"crc\" DevicePath \"\""
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.912925 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56716ad7-406c-49f8-bbc8-4ba990861e48-kube-api-access-c2kpv" (OuterVolumeSpecName: "kube-api-access-c2kpv") pod "56716ad7-406c-49f8-bbc8-4ba990861e48" (UID: "56716ad7-406c-49f8-bbc8-4ba990861e48"). InnerVolumeSpecName "kube-api-access-c2kpv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.932756 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56716ad7-406c-49f8-bbc8-4ba990861e48-config-data" (OuterVolumeSpecName: "config-data") pod "56716ad7-406c-49f8-bbc8-4ba990861e48" (UID: "56716ad7-406c-49f8-bbc8-4ba990861e48"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.934908 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56716ad7-406c-49f8-bbc8-4ba990861e48-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "56716ad7-406c-49f8-bbc8-4ba990861e48" (UID: "56716ad7-406c-49f8-bbc8-4ba990861e48"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.954772 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56716ad7-406c-49f8-bbc8-4ba990861e48-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "56716ad7-406c-49f8-bbc8-4ba990861e48" (UID: "56716ad7-406c-49f8-bbc8-4ba990861e48"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:00:28 crc kubenswrapper[4632]: I1201 07:00:28.956519 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56716ad7-406c-49f8-bbc8-4ba990861e48-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "56716ad7-406c-49f8-bbc8-4ba990861e48" (UID: "56716ad7-406c-49f8-bbc8-4ba990861e48"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.012459 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8beb9f92-1512-4843-a060-e7407372d147-config-data\") pod \"nova-metadata-0\" (UID: \"8beb9f92-1512-4843-a060-e7407372d147\") " pod="openstack/nova-metadata-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.012611 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8beb9f92-1512-4843-a060-e7407372d147-logs\") pod \"nova-metadata-0\" (UID: \"8beb9f92-1512-4843-a060-e7407372d147\") " pod="openstack/nova-metadata-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.012655 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8beb9f92-1512-4843-a060-e7407372d147-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8beb9f92-1512-4843-a060-e7407372d147\") " pod="openstack/nova-metadata-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.012689 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8beb9f92-1512-4843-a060-e7407372d147-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"8beb9f92-1512-4843-a060-e7407372d147\") " pod="openstack/nova-metadata-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.012720 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5q2w\" (UniqueName: \"kubernetes.io/projected/8beb9f92-1512-4843-a060-e7407372d147-kube-api-access-g5q2w\") pod \"nova-metadata-0\" (UID: \"8beb9f92-1512-4843-a060-e7407372d147\") " pod="openstack/nova-metadata-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.012775 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56716ad7-406c-49f8-bbc8-4ba990861e48-config-data\") on node \"crc\" DevicePath \"\""
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.012796 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56716ad7-406c-49f8-bbc8-4ba990861e48-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.012810 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c2kpv\" (UniqueName: \"kubernetes.io/projected/56716ad7-406c-49f8-bbc8-4ba990861e48-kube-api-access-c2kpv\") on node \"crc\" DevicePath \"\""
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.012822 4632 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56716ad7-406c-49f8-bbc8-4ba990861e48-public-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.012833 4632 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/56716ad7-406c-49f8-bbc8-4ba990861e48-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.013111 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8beb9f92-1512-4843-a060-e7407372d147-logs\") pod \"nova-metadata-0\" (UID: \"8beb9f92-1512-4843-a060-e7407372d147\") " pod="openstack/nova-metadata-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.015622 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8beb9f92-1512-4843-a060-e7407372d147-config-data\") pod \"nova-metadata-0\" (UID: \"8beb9f92-1512-4843-a060-e7407372d147\") " pod="openstack/nova-metadata-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.017126 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8beb9f92-1512-4843-a060-e7407372d147-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8beb9f92-1512-4843-a060-e7407372d147\") " pod="openstack/nova-metadata-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.021989 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8beb9f92-1512-4843-a060-e7407372d147-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"8beb9f92-1512-4843-a060-e7407372d147\") " pod="openstack/nova-metadata-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.028624 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5q2w\" (UniqueName: \"kubernetes.io/projected/8beb9f92-1512-4843-a060-e7407372d147-kube-api-access-g5q2w\") pod \"nova-metadata-0\" (UID: \"8beb9f92-1512-4843-a060-e7407372d147\") " pod="openstack/nova-metadata-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.127057 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.676469 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 01 07:00:29 crc kubenswrapper[4632]: W1201 07:00:29.683525 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8beb9f92_1512_4843_a060_e7407372d147.slice/crio-951806f5e689f22a281d01a2281786974bb4a0704f9f60da9ae587a1961f215a WatchSource:0}: Error finding container 951806f5e689f22a281d01a2281786974bb4a0704f9f60da9ae587a1961f215a: Status 404 returned error can't find the container with id 951806f5e689f22a281d01a2281786974bb4a0704f9f60da9ae587a1961f215a
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.729907 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"56716ad7-406c-49f8-bbc8-4ba990861e48","Type":"ContainerDied","Data":"9056baa03a5408fdbf5e8d17aeb0896f7200d6fc03716b79132809eebbbeff5f"}
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.729971 4632 scope.go:117] "RemoveContainer" containerID="25d46ddcca3dddaad82d49d7150321cf1863ebbaa8010769e572b761c55600d7"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.730079 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.738820 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8beb9f92-1512-4843-a060-e7407372d147","Type":"ContainerStarted","Data":"951806f5e689f22a281d01a2281786974bb4a0704f9f60da9ae587a1961f215a"}
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.754918 4632 scope.go:117] "RemoveContainer" containerID="0658b9380c8dea41fac6f5c33575cbf30ed5fd2ef81dea8bf6d3489a0c70df49"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.768983 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.780286 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.792957 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.794611 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.796888 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.797145 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.797283 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.800490 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.829106 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9565ad41-8a6b-461d-8299-e8fe256d30eb-config-data\") pod \"nova-api-0\" (UID: \"9565ad41-8a6b-461d-8299-e8fe256d30eb\") " pod="openstack/nova-api-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.829539 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9565ad41-8a6b-461d-8299-e8fe256d30eb-public-tls-certs\") pod \"nova-api-0\" (UID: \"9565ad41-8a6b-461d-8299-e8fe256d30eb\") " pod="openstack/nova-api-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.829630 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bt2r\" (UniqueName: \"kubernetes.io/projected/9565ad41-8a6b-461d-8299-e8fe256d30eb-kube-api-access-8bt2r\") pod \"nova-api-0\" (UID: \"9565ad41-8a6b-461d-8299-e8fe256d30eb\") " pod="openstack/nova-api-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.829770 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9565ad41-8a6b-461d-8299-e8fe256d30eb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9565ad41-8a6b-461d-8299-e8fe256d30eb\") " pod="openstack/nova-api-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.829794 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9565ad41-8a6b-461d-8299-e8fe256d30eb-logs\") pod \"nova-api-0\" (UID: \"9565ad41-8a6b-461d-8299-e8fe256d30eb\") " pod="openstack/nova-api-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.829819 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9565ad41-8a6b-461d-8299-e8fe256d30eb-internal-tls-certs\") pod \"nova-api-0\" (UID: \"9565ad41-8a6b-461d-8299-e8fe256d30eb\") " pod="openstack/nova-api-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.931788 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9565ad41-8a6b-461d-8299-e8fe256d30eb-config-data\") pod \"nova-api-0\" (UID: \"9565ad41-8a6b-461d-8299-e8fe256d30eb\") " pod="openstack/nova-api-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.932059 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9565ad41-8a6b-461d-8299-e8fe256d30eb-public-tls-certs\") pod \"nova-api-0\" (UID: \"9565ad41-8a6b-461d-8299-e8fe256d30eb\") " pod="openstack/nova-api-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.932123 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bt2r\" (UniqueName: \"kubernetes.io/projected/9565ad41-8a6b-461d-8299-e8fe256d30eb-kube-api-access-8bt2r\") pod \"nova-api-0\" (UID: \"9565ad41-8a6b-461d-8299-e8fe256d30eb\") " pod="openstack/nova-api-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.932236 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9565ad41-8a6b-461d-8299-e8fe256d30eb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9565ad41-8a6b-461d-8299-e8fe256d30eb\") " pod="openstack/nova-api-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.932260 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9565ad41-8a6b-461d-8299-e8fe256d30eb-logs\") pod \"nova-api-0\" (UID: \"9565ad41-8a6b-461d-8299-e8fe256d30eb\") " pod="openstack/nova-api-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.932287 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9565ad41-8a6b-461d-8299-e8fe256d30eb-internal-tls-certs\") pod \"nova-api-0\" (UID: \"9565ad41-8a6b-461d-8299-e8fe256d30eb\") " pod="openstack/nova-api-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.932810 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9565ad41-8a6b-461d-8299-e8fe256d30eb-logs\") pod \"nova-api-0\" (UID: \"9565ad41-8a6b-461d-8299-e8fe256d30eb\") " pod="openstack/nova-api-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.935598 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9565ad41-8a6b-461d-8299-e8fe256d30eb-public-tls-certs\") pod \"nova-api-0\" (UID: \"9565ad41-8a6b-461d-8299-e8fe256d30eb\") " pod="openstack/nova-api-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.935864 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9565ad41-8a6b-461d-8299-e8fe256d30eb-internal-tls-certs\") pod \"nova-api-0\" (UID: \"9565ad41-8a6b-461d-8299-e8fe256d30eb\") " pod="openstack/nova-api-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.935963 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9565ad41-8a6b-461d-8299-e8fe256d30eb-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9565ad41-8a6b-461d-8299-e8fe256d30eb\") " pod="openstack/nova-api-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.936375 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9565ad41-8a6b-461d-8299-e8fe256d30eb-config-data\") pod \"nova-api-0\" (UID: \"9565ad41-8a6b-461d-8299-e8fe256d30eb\") " pod="openstack/nova-api-0"
Dec 01 07:00:29 crc kubenswrapper[4632]: I1201 07:00:29.948718 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bt2r\" (UniqueName: \"kubernetes.io/projected/9565ad41-8a6b-461d-8299-e8fe256d30eb-kube-api-access-8bt2r\") pod \"nova-api-0\" (UID: \"9565ad41-8a6b-461d-8299-e8fe256d30eb\") " pod="openstack/nova-api-0"
Dec 01 07:00:30 crc kubenswrapper[4632]: I1201 07:00:30.119051 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 01 07:00:30 crc kubenswrapper[4632]: I1201 07:00:30.542470 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 01 07:00:30 crc kubenswrapper[4632]: I1201 07:00:30.749152 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8beb9f92-1512-4843-a060-e7407372d147","Type":"ContainerStarted","Data":"60103ab8d649a84621d3ecb11e969068eca7afac4833cd0d3251b94a1f7e1f73"}
Dec 01 07:00:30 crc kubenswrapper[4632]: I1201 07:00:30.749183 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"8beb9f92-1512-4843-a060-e7407372d147","Type":"ContainerStarted","Data":"b8108e038d8d39af7d6364ea330adf42584b53be5fd9953c01535e1df114a740"}
Dec 01 07:00:30 crc kubenswrapper[4632]: I1201 07:00:30.764225 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00425bd3-e9db-4a5b-a064-9c75692cf6c1" path="/var/lib/kubelet/pods/00425bd3-e9db-4a5b-a064-9c75692cf6c1/volumes"
Dec 01 07:00:30 crc kubenswrapper[4632]: I1201 07:00:30.764951 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56716ad7-406c-49f8-bbc8-4ba990861e48" path="/var/lib/kubelet/pods/56716ad7-406c-49f8-bbc8-4ba990861e48/volumes"
Dec 01 07:00:30 crc kubenswrapper[4632]: I1201 07:00:30.765524 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9565ad41-8a6b-461d-8299-e8fe256d30eb","Type":"ContainerStarted","Data":"2ee7456d14c759626b678370417af576e5efab976e1ba7dc2387d5fb3635117f"}
Dec 01 07:00:30 crc kubenswrapper[4632]: I1201 07:00:30.765543 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9565ad41-8a6b-461d-8299-e8fe256d30eb","Type":"ContainerStarted","Data":"911809aef282354e1b1b279128850b021c08c2b8ae61d83cfad8cc078e85c0be"}
Dec 01 07:00:30 crc kubenswrapper[4632]: I1201 07:00:30.822153 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.8221269429999998 podStartE2EDuration="2.822126943s" podCreationTimestamp="2025-12-01 07:00:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:00:30.798003047 +0000 UTC m=+1040.363016020" watchObservedRunningTime="2025-12-01 07:00:30.822126943 +0000 UTC m=+1040.387139917"
Dec 01 07:00:31 crc kubenswrapper[4632]: I1201 07:00:31.765474 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"9565ad41-8a6b-461d-8299-e8fe256d30eb","Type":"ContainerStarted","Data":"5c829edbca84dd463b1f5a64b2d8bab875ccf2d0e1658eccba1e0a83ad6a3c7a"}
Dec 01 07:00:31 crc kubenswrapper[4632]: I1201 07:00:31.787748 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.787712563 podStartE2EDuration="2.787712563s" podCreationTimestamp="2025-12-01 07:00:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:00:31.784137508 +0000 UTC m=+1041.349150481" watchObservedRunningTime="2025-12-01 07:00:31.787712563 +0000 UTC m=+1041.352725535"
Dec 01 07:00:32 crc kubenswrapper[4632]: I1201 07:00:32.042222 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Dec 01 07:00:34 crc kubenswrapper[4632]: I1201 07:00:34.127291 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 01 07:00:34 crc kubenswrapper[4632]: I1201 07:00:34.127410 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 01 07:00:37 crc kubenswrapper[4632]: I1201 07:00:37.042199 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Dec 01 07:00:37 crc kubenswrapper[4632]: I1201 07:00:37.069464 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Dec 01 07:00:37 crc kubenswrapper[4632]: I1201 07:00:37.858454 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Dec 01 07:00:39 crc kubenswrapper[4632]: I1201 07:00:39.128485 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Dec 01 07:00:39 crc kubenswrapper[4632]: I1201 07:00:39.128565 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Dec 01 07:00:40 crc kubenswrapper[4632]: I1201 07:00:40.119953 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 01 07:00:40 crc kubenswrapper[4632]: I1201 07:00:40.120346 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 01 07:00:40 crc kubenswrapper[4632]: I1201 07:00:40.147126 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="8beb9f92-1512-4843-a060-e7407372d147" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 01 07:00:40 crc kubenswrapper[4632]: I1201 07:00:40.147542 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="8beb9f92-1512-4843-a060-e7407372d147" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.200:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 01 07:00:41 crc kubenswrapper[4632]: I1201 07:00:41.139568 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="9565ad41-8a6b-461d-8299-e8fe256d30eb" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.201:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 01 07:00:41 crc kubenswrapper[4632]: I1201 07:00:41.139621 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="9565ad41-8a6b-461d-8299-e8fe256d30eb" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.201:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 01 07:00:42 crc kubenswrapper[4632]: I1201 07:00:42.814852 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Dec 01 07:00:49 crc kubenswrapper[4632]: I1201 07:00:49.132638 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Dec 01 07:00:49 crc kubenswrapper[4632]: I1201 07:00:49.133410 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Dec 01 07:00:49 crc kubenswrapper[4632]: I1201 07:00:49.140143 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Dec 01 07:00:49 crc kubenswrapper[4632]: I1201 07:00:49.140207 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Dec 01 07:00:50 crc kubenswrapper[4632]: I1201 07:00:50.135117 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Dec 01 07:00:50 crc kubenswrapper[4632]: I1201 07:00:50.135698 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Dec 01 07:00:50 crc kubenswrapper[4632]: I1201 07:00:50.140917 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Dec 01 07:00:50 crc kubenswrapper[4632]: I1201 07:00:50.142153 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Dec 01 07:00:50 crc kubenswrapper[4632]: I1201 07:00:50.960675 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Dec 01 07:00:50 crc kubenswrapper[4632]: I1201 07:00:50.966985 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Dec 01 07:00:57 crc kubenswrapper[4632]: I1201 07:00:57.266579 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 01 07:00:58 crc kubenswrapper[4632]: I1201 07:00:58.111833 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 01 07:01:00 crc kubenswrapper[4632]: I1201 07:01:00.147832 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29409541-h825b"]
Dec 01 07:01:00 crc kubenswrapper[4632]: I1201 07:01:00.149252 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29409541-h825b"
Dec 01 07:01:00 crc kubenswrapper[4632]: I1201 07:01:00.158644 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29409541-h825b"]
Dec 01 07:01:00 crc kubenswrapper[4632]: I1201 07:01:00.262586 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b87ff38-8ea4-4e1e-8553-aa3069f0223a-config-data\") pod \"keystone-cron-29409541-h825b\" (UID: \"9b87ff38-8ea4-4e1e-8553-aa3069f0223a\") " pod="openstack/keystone-cron-29409541-h825b"
Dec 01 07:01:00 crc kubenswrapper[4632]: I1201 07:01:00.262907 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b87ff38-8ea4-4e1e-8553-aa3069f0223a-combined-ca-bundle\") pod \"keystone-cron-29409541-h825b\" (UID: \"9b87ff38-8ea4-4e1e-8553-aa3069f0223a\") " pod="openstack/keystone-cron-29409541-h825b"
Dec 01 07:01:00 crc kubenswrapper[4632]: I1201 07:01:00.263069 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9b87ff38-8ea4-4e1e-8553-aa3069f0223a-fernet-keys\") pod \"keystone-cron-29409541-h825b\" (UID: \"9b87ff38-8ea4-4e1e-8553-aa3069f0223a\") " pod="openstack/keystone-cron-29409541-h825b"
Dec 01 07:01:00 crc kubenswrapper[4632]: I1201 07:01:00.263239 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfqqj\" (UniqueName: \"kubernetes.io/projected/9b87ff38-8ea4-4e1e-8553-aa3069f0223a-kube-api-access-cfqqj\") pod \"keystone-cron-29409541-h825b\" (UID: \"9b87ff38-8ea4-4e1e-8553-aa3069f0223a\") " pod="openstack/keystone-cron-29409541-h825b"
Dec 01 07:01:00 crc kubenswrapper[4632]: I1201 07:01:00.366272 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b87ff38-8ea4-4e1e-8553-aa3069f0223a-config-data\") pod \"keystone-cron-29409541-h825b\" (UID: \"9b87ff38-8ea4-4e1e-8553-aa3069f0223a\") " pod="openstack/keystone-cron-29409541-h825b"
Dec 01 07:01:00 crc kubenswrapper[4632]: I1201 07:01:00.366337 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b87ff38-8ea4-4e1e-8553-aa3069f0223a-combined-ca-bundle\") pod \"keystone-cron-29409541-h825b\" (UID: \"9b87ff38-8ea4-4e1e-8553-aa3069f0223a\") " pod="openstack/keystone-cron-29409541-h825b"
Dec 01 07:01:00 crc kubenswrapper[4632]: I1201 07:01:00.366385 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9b87ff38-8ea4-4e1e-8553-aa3069f0223a-fernet-keys\") pod \"keystone-cron-29409541-h825b\" (UID: \"9b87ff38-8ea4-4e1e-8553-aa3069f0223a\") " pod="openstack/keystone-cron-29409541-h825b"
Dec 01 07:01:00 crc kubenswrapper[4632]: I1201 07:01:00.366424 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfqqj\" (UniqueName: \"kubernetes.io/projected/9b87ff38-8ea4-4e1e-8553-aa3069f0223a-kube-api-access-cfqqj\") pod \"keystone-cron-29409541-h825b\" (UID: \"9b87ff38-8ea4-4e1e-8553-aa3069f0223a\") " pod="openstack/keystone-cron-29409541-h825b"
Dec 01 07:01:00 crc kubenswrapper[4632]: I1201 07:01:00.373136 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9b87ff38-8ea4-4e1e-8553-aa3069f0223a-fernet-keys\") pod \"keystone-cron-29409541-h825b\" (UID: \"9b87ff38-8ea4-4e1e-8553-aa3069f0223a\") " pod="openstack/keystone-cron-29409541-h825b"
Dec 01 07:01:00 crc kubenswrapper[4632]: I1201 07:01:00.378158 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b87ff38-8ea4-4e1e-8553-aa3069f0223a-config-data\") pod \"keystone-cron-29409541-h825b\" (UID: \"9b87ff38-8ea4-4e1e-8553-aa3069f0223a\") " pod="openstack/keystone-cron-29409541-h825b"
Dec 01 07:01:00 crc kubenswrapper[4632]: I1201 07:01:00.381814 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfqqj\" (UniqueName: \"kubernetes.io/projected/9b87ff38-8ea4-4e1e-8553-aa3069f0223a-kube-api-access-cfqqj\") pod \"keystone-cron-29409541-h825b\" (UID: \"9b87ff38-8ea4-4e1e-8553-aa3069f0223a\") " pod="openstack/keystone-cron-29409541-h825b"
Dec 01 07:01:00 crc kubenswrapper[4632]: I1201 07:01:00.392247 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b87ff38-8ea4-4e1e-8553-aa3069f0223a-combined-ca-bundle\") pod \"keystone-cron-29409541-h825b\" (UID: \"9b87ff38-8ea4-4e1e-8553-aa3069f0223a\") " pod="openstack/keystone-cron-29409541-h825b"
Dec 01 07:01:00 crc kubenswrapper[4632]: I1201 07:01:00.481396 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29409541-h825b"
Dec 01 07:01:00 crc kubenswrapper[4632]: I1201 07:01:00.876167 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29409541-h825b"]
Dec 01 07:01:01 crc kubenswrapper[4632]: I1201 07:01:01.060398 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29409541-h825b" event={"ID":"9b87ff38-8ea4-4e1e-8553-aa3069f0223a","Type":"ContainerStarted","Data":"4f09f4432bc0044f9d22d3566de092b5fa2769a76565f6b289fb407801d3f64e"}
Dec 01 07:01:01 crc kubenswrapper[4632]: I1201 07:01:01.060687 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29409541-h825b" event={"ID":"9b87ff38-8ea4-4e1e-8553-aa3069f0223a","Type":"ContainerStarted","Data":"5cbe5ad34244d6a3c449ce076d518686ab419e6b846c83ab984a6b243337218c"}
Dec 01 07:01:01 crc kubenswrapper[4632]: I1201 07:01:01.078652 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29409541-h825b" podStartSLOduration=1.078633875 podStartE2EDuration="1.078633875s" podCreationTimestamp="2025-12-01 07:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:01:01.074466985 +0000 UTC m=+1070.639479958" watchObservedRunningTime="2025-12-01 07:01:01.078633875 +0000 UTC m=+1070.643646848"
Dec 01 07:01:01 crc kubenswrapper[4632]: I1201 07:01:01.880169 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="67043517-303b-4159-a030-1192c39b98dd" containerName="rabbitmq" containerID="cri-o://8fbdab565302cda55097719870e3fc369d5107311d572e57df688026b1c175a0" gracePeriod=604796
Dec 01 07:01:01 crc kubenswrapper[4632]: I1201 07:01:01.944703 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="ff6e2f27-a2b5-4046-8e0e-dc495271a359" containerName="rabbitmq" containerID="cri-o://66eb42a52fa597607f9038e2d80f7371efa6b20d3c1a584179ca0caa3e6d763b" gracePeriod=604797
Dec 01 07:01:03 crc kubenswrapper[4632]: I1201 07:01:03.078832 4632 generic.go:334] "Generic (PLEG): container finished" podID="9b87ff38-8ea4-4e1e-8553-aa3069f0223a" containerID="4f09f4432bc0044f9d22d3566de092b5fa2769a76565f6b289fb407801d3f64e" exitCode=0
Dec 01 07:01:03 crc kubenswrapper[4632]: I1201 07:01:03.078901 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29409541-h825b" event={"ID":"9b87ff38-8ea4-4e1e-8553-aa3069f0223a","Type":"ContainerDied","Data":"4f09f4432bc0044f9d22d3566de092b5fa2769a76565f6b289fb407801d3f64e"}
Dec 01 07:01:03 crc kubenswrapper[4632]: E1201 07:01:03.354452 4632 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 192.168.26.49:47984->192.168.26.49:41407: read tcp 192.168.26.49:47984->192.168.26.49:41407: read: connection reset by peer
Dec 01 07:01:04 crc kubenswrapper[4632]: I1201 07:01:04.386470 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29409541-h825b"
Dec 01 07:01:04 crc kubenswrapper[4632]: I1201 07:01:04.424737 4632 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="ff6e2f27-a2b5-4046-8e0e-dc495271a359" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.94:5671: connect: connection refused"
Dec 01 07:01:04 crc kubenswrapper[4632]: I1201 07:01:04.458828 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfqqj\" (UniqueName: \"kubernetes.io/projected/9b87ff38-8ea4-4e1e-8553-aa3069f0223a-kube-api-access-cfqqj\") pod \"9b87ff38-8ea4-4e1e-8553-aa3069f0223a\" (UID: \"9b87ff38-8ea4-4e1e-8553-aa3069f0223a\") "
Dec 01 07:01:04 crc kubenswrapper[4632]: I1201 07:01:04.458961 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b87ff38-8ea4-4e1e-8553-aa3069f0223a-config-data\") pod \"9b87ff38-8ea4-4e1e-8553-aa3069f0223a\" (UID: \"9b87ff38-8ea4-4e1e-8553-aa3069f0223a\") "
Dec 01 07:01:04 crc kubenswrapper[4632]: I1201 07:01:04.459218 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9b87ff38-8ea4-4e1e-8553-aa3069f0223a-fernet-keys\") pod \"9b87ff38-8ea4-4e1e-8553-aa3069f0223a\" (UID: \"9b87ff38-8ea4-4e1e-8553-aa3069f0223a\") "
Dec 01 07:01:04 crc kubenswrapper[4632]: I1201 07:01:04.459282 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b87ff38-8ea4-4e1e-8553-aa3069f0223a-combined-ca-bundle\") pod \"9b87ff38-8ea4-4e1e-8553-aa3069f0223a\" (UID: \"9b87ff38-8ea4-4e1e-8553-aa3069f0223a\") "
Dec 01 07:01:04 crc kubenswrapper[4632]: I1201 07:01:04.466634 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b87ff38-8ea4-4e1e-8553-aa3069f0223a-kube-api-access-cfqqj" (OuterVolumeSpecName: "kube-api-access-cfqqj") pod "9b87ff38-8ea4-4e1e-8553-aa3069f0223a" (UID: "9b87ff38-8ea4-4e1e-8553-aa3069f0223a"). InnerVolumeSpecName "kube-api-access-cfqqj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:01:04 crc kubenswrapper[4632]: I1201 07:01:04.468501 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b87ff38-8ea4-4e1e-8553-aa3069f0223a-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "9b87ff38-8ea4-4e1e-8553-aa3069f0223a" (UID: "9b87ff38-8ea4-4e1e-8553-aa3069f0223a"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:01:04 crc kubenswrapper[4632]: I1201 07:01:04.485390 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b87ff38-8ea4-4e1e-8553-aa3069f0223a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9b87ff38-8ea4-4e1e-8553-aa3069f0223a" (UID: "9b87ff38-8ea4-4e1e-8553-aa3069f0223a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:01:04 crc kubenswrapper[4632]: I1201 07:01:04.502534 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b87ff38-8ea4-4e1e-8553-aa3069f0223a-config-data" (OuterVolumeSpecName: "config-data") pod "9b87ff38-8ea4-4e1e-8553-aa3069f0223a" (UID: "9b87ff38-8ea4-4e1e-8553-aa3069f0223a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:01:04 crc kubenswrapper[4632]: I1201 07:01:04.561423 4632 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9b87ff38-8ea4-4e1e-8553-aa3069f0223a-fernet-keys\") on node \"crc\" DevicePath \"\""
Dec 01 07:01:04 crc kubenswrapper[4632]: I1201 07:01:04.561460 4632 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b87ff38-8ea4-4e1e-8553-aa3069f0223a-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 07:01:04 crc kubenswrapper[4632]: I1201 07:01:04.561474 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfqqj\" (UniqueName: \"kubernetes.io/projected/9b87ff38-8ea4-4e1e-8553-aa3069f0223a-kube-api-access-cfqqj\") on node \"crc\" DevicePath \"\""
Dec 01 07:01:04 crc kubenswrapper[4632]: I1201 07:01:04.561516 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b87ff38-8ea4-4e1e-8553-aa3069f0223a-config-data\") on node \"crc\" DevicePath \"\""
Dec 01 07:01:04 crc kubenswrapper[4632]: I1201 07:01:04.710653 4632 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="67043517-303b-4159-a030-1192c39b98dd" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.95:5671: connect: connection refused"
Dec 01 07:01:05 crc kubenswrapper[4632]: I1201 07:01:05.103330 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29409541-h825b" event={"ID":"9b87ff38-8ea4-4e1e-8553-aa3069f0223a","Type":"ContainerDied","Data":"5cbe5ad34244d6a3c449ce076d518686ab419e6b846c83ab984a6b243337218c"}
Dec 01 07:01:05 crc kubenswrapper[4632]: I1201 07:01:05.103425 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5cbe5ad34244d6a3c449ce076d518686ab419e6b846c83ab984a6b243337218c"
Dec 01 07:01:05 crc kubenswrapper[4632]: I1201 07:01:05.103477 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29409541-h825b"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.137133 4632 generic.go:334] "Generic (PLEG): container finished" podID="ff6e2f27-a2b5-4046-8e0e-dc495271a359" containerID="66eb42a52fa597607f9038e2d80f7371efa6b20d3c1a584179ca0caa3e6d763b" exitCode=0
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.137520 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ff6e2f27-a2b5-4046-8e0e-dc495271a359","Type":"ContainerDied","Data":"66eb42a52fa597607f9038e2d80f7371efa6b20d3c1a584179ca0caa3e6d763b"}
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.141820 4632 generic.go:334] "Generic (PLEG): container finished" podID="67043517-303b-4159-a030-1192c39b98dd" containerID="8fbdab565302cda55097719870e3fc369d5107311d572e57df688026b1c175a0" exitCode=0
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.141863 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"67043517-303b-4159-a030-1192c39b98dd","Type":"ContainerDied","Data":"8fbdab565302cda55097719870e3fc369d5107311d572e57df688026b1c175a0"}
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.300974 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.401913 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.447171 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/67043517-303b-4159-a030-1192c39b98dd-rabbitmq-plugins\") pod \"67043517-303b-4159-a030-1192c39b98dd\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") "
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.447333 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ff6e2f27-a2b5-4046-8e0e-dc495271a359-erlang-cookie-secret\") pod \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") "
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.447424 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ff6e2f27-a2b5-4046-8e0e-dc495271a359-rabbitmq-confd\") pod \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") "
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.447492 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ff6e2f27-a2b5-4046-8e0e-dc495271a359-server-conf\") pod \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") "
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.447515 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/67043517-303b-4159-a030-1192c39b98dd-rabbitmq-confd\") pod \"67043517-303b-4159-a030-1192c39b98dd\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") "
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.447540 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ff6e2f27-a2b5-4046-8e0e-dc495271a359-config-data\") pod \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") "
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.447556 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ff6e2f27-a2b5-4046-8e0e-dc495271a359-rabbitmq-plugins\") pod \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") "
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.447606 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-92w4z\" (UniqueName: \"kubernetes.io/projected/ff6e2f27-a2b5-4046-8e0e-dc495271a359-kube-api-access-92w4z\") pod \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") "
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.447625 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/67043517-303b-4159-a030-1192c39b98dd-server-conf\") pod \"67043517-303b-4159-a030-1192c39b98dd\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") "
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.447646 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjg6x\" (UniqueName: \"kubernetes.io/projected/67043517-303b-4159-a030-1192c39b98dd-kube-api-access-sjg6x\") pod \"67043517-303b-4159-a030-1192c39b98dd\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") "
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.447686 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/67043517-303b-4159-a030-1192c39b98dd-rabbitmq-tls\") pod \"67043517-303b-4159-a030-1192c39b98dd\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") "
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.447729 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/67043517-303b-4159-a030-1192c39b98dd-rabbitmq-erlang-cookie\") pod \"67043517-303b-4159-a030-1192c39b98dd\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") "
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.447756 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ff6e2f27-a2b5-4046-8e0e-dc495271a359-pod-info\") pod \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") "
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.447782 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/67043517-303b-4159-a030-1192c39b98dd-config-data\") pod \"67043517-303b-4159-a030-1192c39b98dd\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") "
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.447802 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ff6e2f27-a2b5-4046-8e0e-dc495271a359-plugins-conf\") pod \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") "
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.447818 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/67043517-303b-4159-a030-1192c39b98dd-pod-info\") pod \"67043517-303b-4159-a030-1192c39b98dd\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") "
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.447848 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ff6e2f27-a2b5-4046-8e0e-dc495271a359-rabbitmq-erlang-cookie\") pod \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") "
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.447865 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ff6e2f27-a2b5-4046-8e0e-dc495271a359-rabbitmq-tls\") pod \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") "
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.447898 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/67043517-303b-4159-a030-1192c39b98dd-erlang-cookie-secret\") pod \"67043517-303b-4159-a030-1192c39b98dd\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") "
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.447924 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\" (UID: \"ff6e2f27-a2b5-4046-8e0e-dc495271a359\") "
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.447948 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"67043517-303b-4159-a030-1192c39b98dd\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") "
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.447976 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/67043517-303b-4159-a030-1192c39b98dd-plugins-conf\") pod \"67043517-303b-4159-a030-1192c39b98dd\" (UID: \"67043517-303b-4159-a030-1192c39b98dd\") "
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.447997 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67043517-303b-4159-a030-1192c39b98dd-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "67043517-303b-4159-a030-1192c39b98dd" (UID: "67043517-303b-4159-a030-1192c39b98dd"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.448409 4632 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/67043517-303b-4159-a030-1192c39b98dd-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.449013 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67043517-303b-4159-a030-1192c39b98dd-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "67043517-303b-4159-a030-1192c39b98dd" (UID: "67043517-303b-4159-a030-1192c39b98dd"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.449796 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67043517-303b-4159-a030-1192c39b98dd-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "67043517-303b-4159-a030-1192c39b98dd" (UID: "67043517-303b-4159-a030-1192c39b98dd"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.451729 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff6e2f27-a2b5-4046-8e0e-dc495271a359-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "ff6e2f27-a2b5-4046-8e0e-dc495271a359" (UID: "ff6e2f27-a2b5-4046-8e0e-dc495271a359"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.452103 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff6e2f27-a2b5-4046-8e0e-dc495271a359-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "ff6e2f27-a2b5-4046-8e0e-dc495271a359" (UID: "ff6e2f27-a2b5-4046-8e0e-dc495271a359"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.452980 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff6e2f27-a2b5-4046-8e0e-dc495271a359-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "ff6e2f27-a2b5-4046-8e0e-dc495271a359" (UID: "ff6e2f27-a2b5-4046-8e0e-dc495271a359"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.457561 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67043517-303b-4159-a030-1192c39b98dd-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "67043517-303b-4159-a030-1192c39b98dd" (UID: "67043517-303b-4159-a030-1192c39b98dd"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.466896 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff6e2f27-a2b5-4046-8e0e-dc495271a359-kube-api-access-92w4z" (OuterVolumeSpecName: "kube-api-access-92w4z") pod "ff6e2f27-a2b5-4046-8e0e-dc495271a359" (UID: "ff6e2f27-a2b5-4046-8e0e-dc495271a359"). InnerVolumeSpecName "kube-api-access-92w4z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.467089 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "persistence") pod "67043517-303b-4159-a030-1192c39b98dd" (UID: "67043517-303b-4159-a030-1192c39b98dd"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.470698 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff6e2f27-a2b5-4046-8e0e-dc495271a359-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "ff6e2f27-a2b5-4046-8e0e-dc495271a359" (UID: "ff6e2f27-a2b5-4046-8e0e-dc495271a359"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.470826 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67043517-303b-4159-a030-1192c39b98dd-kube-api-access-sjg6x" (OuterVolumeSpecName: "kube-api-access-sjg6x") pod "67043517-303b-4159-a030-1192c39b98dd" (UID: "67043517-303b-4159-a030-1192c39b98dd"). InnerVolumeSpecName "kube-api-access-sjg6x". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.484363 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "ff6e2f27-a2b5-4046-8e0e-dc495271a359" (UID: "ff6e2f27-a2b5-4046-8e0e-dc495271a359"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.490083 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff6e2f27-a2b5-4046-8e0e-dc495271a359-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "ff6e2f27-a2b5-4046-8e0e-dc495271a359" (UID: "ff6e2f27-a2b5-4046-8e0e-dc495271a359"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.494419 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/ff6e2f27-a2b5-4046-8e0e-dc495271a359-pod-info" (OuterVolumeSpecName: "pod-info") pod "ff6e2f27-a2b5-4046-8e0e-dc495271a359" (UID: "ff6e2f27-a2b5-4046-8e0e-dc495271a359"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.496805 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-fc4f69dd7-f5mmw"]
Dec 01 07:01:08 crc kubenswrapper[4632]: E1201 07:01:08.497214 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff6e2f27-a2b5-4046-8e0e-dc495271a359" containerName="setup-container"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.497234 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff6e2f27-a2b5-4046-8e0e-dc495271a359" containerName="setup-container"
Dec 01 07:01:08 crc kubenswrapper[4632]: E1201 07:01:08.497247 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b87ff38-8ea4-4e1e-8553-aa3069f0223a" containerName="keystone-cron"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.497254 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b87ff38-8ea4-4e1e-8553-aa3069f0223a" containerName="keystone-cron"
Dec 01 07:01:08 crc kubenswrapper[4632]: E1201 07:01:08.497274 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67043517-303b-4159-a030-1192c39b98dd" containerName="rabbitmq"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.497280 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="67043517-303b-4159-a030-1192c39b98dd" containerName="rabbitmq"
Dec 01 07:01:08 crc kubenswrapper[4632]: E1201 07:01:08.497289 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff6e2f27-a2b5-4046-8e0e-dc495271a359" containerName="rabbitmq"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.497295 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff6e2f27-a2b5-4046-8e0e-dc495271a359" containerName="rabbitmq"
Dec 01 07:01:08 crc kubenswrapper[4632]: E1201 07:01:08.497316 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67043517-303b-4159-a030-1192c39b98dd" containerName="setup-container"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.497323 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="67043517-303b-4159-a030-1192c39b98dd" containerName="setup-container"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.499553 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67043517-303b-4159-a030-1192c39b98dd-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "67043517-303b-4159-a030-1192c39b98dd" (UID: "67043517-303b-4159-a030-1192c39b98dd"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.502885 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b87ff38-8ea4-4e1e-8553-aa3069f0223a" containerName="keystone-cron"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.502916 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff6e2f27-a2b5-4046-8e0e-dc495271a359" containerName="rabbitmq"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.502925 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="67043517-303b-4159-a030-1192c39b98dd" containerName="rabbitmq"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.503989 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.506101 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.508134 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/67043517-303b-4159-a030-1192c39b98dd-pod-info" (OuterVolumeSpecName: "pod-info") pod "67043517-303b-4159-a030-1192c39b98dd" (UID: "67043517-303b-4159-a030-1192c39b98dd"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.509521 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff6e2f27-a2b5-4046-8e0e-dc495271a359-config-data" (OuterVolumeSpecName: "config-data") pod "ff6e2f27-a2b5-4046-8e0e-dc495271a359" (UID: "ff6e2f27-a2b5-4046-8e0e-dc495271a359"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.512872 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fc4f69dd7-f5mmw"]
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.547680 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67043517-303b-4159-a030-1192c39b98dd-config-data" (OuterVolumeSpecName: "config-data") pod "67043517-303b-4159-a030-1192c39b98dd" (UID: "67043517-303b-4159-a030-1192c39b98dd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.549158 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-config\") pod \"dnsmasq-dns-fc4f69dd7-f5mmw\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") " pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.549208 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-ovsdbserver-sb\") pod \"dnsmasq-dns-fc4f69dd7-f5mmw\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") " pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.549232 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-openstack-edpm-ipam\") pod \"dnsmasq-dns-fc4f69dd7-f5mmw\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") " pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.549265 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-dns-svc\") pod \"dnsmasq-dns-fc4f69dd7-f5mmw\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") " pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.549386 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-ovsdbserver-nb\") pod \"dnsmasq-dns-fc4f69dd7-f5mmw\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") " pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.549414 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-dns-swift-storage-0\") pod \"dnsmasq-dns-fc4f69dd7-f5mmw\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") " pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.549463 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8nh9\" (UniqueName: \"kubernetes.io/projected/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-kube-api-access-t8nh9\") pod \"dnsmasq-dns-fc4f69dd7-f5mmw\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") " pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.549509 4632 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/67043517-303b-4159-a030-1192c39b98dd-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.549521 4632 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/67043517-303b-4159-a030-1192c39b98dd-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.549532 4632 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ff6e2f27-a2b5-4046-8e0e-dc495271a359-pod-info\") on node \"crc\" DevicePath \"\""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.549542 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/67043517-303b-4159-a030-1192c39b98dd-config-data\") on node \"crc\" DevicePath \"\""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.549551 4632 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ff6e2f27-a2b5-4046-8e0e-dc495271a359-plugins-conf\") on node \"crc\" DevicePath \"\""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.549559 4632 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/67043517-303b-4159-a030-1192c39b98dd-pod-info\") on node \"crc\" DevicePath \"\""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.549568 4632 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ff6e2f27-a2b5-4046-8e0e-dc495271a359-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.549577 4632 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ff6e2f27-a2b5-4046-8e0e-dc495271a359-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.549594 4632 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/67043517-303b-4159-a030-1192c39b98dd-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.549612 4632 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" "
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.549625 4632 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" "
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.549633 4632 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/67043517-303b-4159-a030-1192c39b98dd-plugins-conf\") on node \"crc\" DevicePath \"\""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.549641 4632 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ff6e2f27-a2b5-4046-8e0e-dc495271a359-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.549649 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ff6e2f27-a2b5-4046-8e0e-dc495271a359-config-data\") on node \"crc\" DevicePath \"\""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.549657 4632 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ff6e2f27-a2b5-4046-8e0e-dc495271a359-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.549664 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-92w4z\" (UniqueName: \"kubernetes.io/projected/ff6e2f27-a2b5-4046-8e0e-dc495271a359-kube-api-access-92w4z\") on node \"crc\" DevicePath \"\""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.549672 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjg6x\" (UniqueName: \"kubernetes.io/projected/67043517-303b-4159-a030-1192c39b98dd-kube-api-access-sjg6x\") on node \"crc\" DevicePath \"\""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.572642 4632 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.573191 4632 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.583434 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff6e2f27-a2b5-4046-8e0e-dc495271a359-server-conf" (OuterVolumeSpecName: "server-conf") pod "ff6e2f27-a2b5-4046-8e0e-dc495271a359" (UID: "ff6e2f27-a2b5-4046-8e0e-dc495271a359"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.588693 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67043517-303b-4159-a030-1192c39b98dd-server-conf" (OuterVolumeSpecName: "server-conf") pod "67043517-303b-4159-a030-1192c39b98dd" (UID: "67043517-303b-4159-a030-1192c39b98dd"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.630663 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67043517-303b-4159-a030-1192c39b98dd-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "67043517-303b-4159-a030-1192c39b98dd" (UID: "67043517-303b-4159-a030-1192c39b98dd"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.632396 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff6e2f27-a2b5-4046-8e0e-dc495271a359-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "ff6e2f27-a2b5-4046-8e0e-dc495271a359" (UID: "ff6e2f27-a2b5-4046-8e0e-dc495271a359"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.651128 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-dns-svc\") pod \"dnsmasq-dns-fc4f69dd7-f5mmw\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") " pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.651216 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-ovsdbserver-nb\") pod \"dnsmasq-dns-fc4f69dd7-f5mmw\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") " pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.651251 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-dns-swift-storage-0\") pod \"dnsmasq-dns-fc4f69dd7-f5mmw\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") " pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.651333 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8nh9\" (UniqueName: \"kubernetes.io/projected/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-kube-api-access-t8nh9\") pod \"dnsmasq-dns-fc4f69dd7-f5mmw\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") " pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.651397 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-config\") pod \"dnsmasq-dns-fc4f69dd7-f5mmw\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") " pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.651445 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-ovsdbserver-sb\") pod \"dnsmasq-dns-fc4f69dd7-f5mmw\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") " pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.651475 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-openstack-edpm-ipam\") pod \"dnsmasq-dns-fc4f69dd7-f5mmw\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") " pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.651531 4632 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ff6e2f27-a2b5-4046-8e0e-dc495271a359-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.651553 4632 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ff6e2f27-a2b5-4046-8e0e-dc495271a359-server-conf\") on node \"crc\" DevicePath \"\""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.651565 4632 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/67043517-303b-4159-a030-1192c39b98dd-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.651579 4632 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/67043517-303b-4159-a030-1192c39b98dd-server-conf\") on node \"crc\" DevicePath \"\""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.651601 4632 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.651612 4632 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\""
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.652190 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-dns-svc\") pod \"dnsmasq-dns-fc4f69dd7-f5mmw\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") " pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.652343 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-dns-swift-storage-0\") pod \"dnsmasq-dns-fc4f69dd7-f5mmw\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") " pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.652513 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-openstack-edpm-ipam\") pod \"dnsmasq-dns-fc4f69dd7-f5mmw\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") " pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.653130 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-ovsdbserver-sb\") pod \"dnsmasq-dns-fc4f69dd7-f5mmw\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") " pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw"
Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.653135 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-config\") pod \"dnsmasq-dns-fc4f69dd7-f5mmw\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") "
pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw" Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.653320 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-ovsdbserver-nb\") pod \"dnsmasq-dns-fc4f69dd7-f5mmw\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") " pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw" Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.666462 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8nh9\" (UniqueName: \"kubernetes.io/projected/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-kube-api-access-t8nh9\") pod \"dnsmasq-dns-fc4f69dd7-f5mmw\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") " pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw" Dec 01 07:01:08 crc kubenswrapper[4632]: I1201 07:01:08.822654 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.155863 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.155855 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"67043517-303b-4159-a030-1192c39b98dd","Type":"ContainerDied","Data":"eca8d2a80ee7de999e2e461fff68c8ca8c684bc394de05c91609f36c3cad9193"} Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.156334 4632 scope.go:117] "RemoveContainer" containerID="8fbdab565302cda55097719870e3fc369d5107311d572e57df688026b1c175a0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.164335 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ff6e2f27-a2b5-4046-8e0e-dc495271a359","Type":"ContainerDied","Data":"1492c7d3d20d10bed7fab58ccf81080919eb8c1d64c2bb4b2460261cff552550"} Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.164557 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.192865 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.206525 4632 scope.go:117] "RemoveContainer" containerID="9b9c7b2f6b6d2cb3fc2cb5af9ba4600c190b2153eb2f1f44d58f42a457c11540" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.232472 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.247857 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.261823 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.279557 4632 scope.go:117] "RemoveContainer" containerID="66eb42a52fa597607f9038e2d80f7371efa6b20d3c1a584179ca0caa3e6d763b" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.287561 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.289537 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.292438 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.292838 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.292965 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.293680 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.293818 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-g6b89" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.293926 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.298978 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.300069 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.305697 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.308800 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.312297 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.312828 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.312967 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-6s96d" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.313077 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.312990 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.313606 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.314144 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.322733 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fc4f69dd7-f5mmw"] Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.339630 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.399953 4632 scope.go:117] "RemoveContainer" containerID="9dc2cfc06da8b761d9017a11b5369af0ede286129a1012ceeccb6b18a2e91a0d" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.472882 4632 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.472981 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.473060 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.473097 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/02ca2059-fc5b-4d54-886b-e6de4f303d3b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.473177 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/02ca2059-fc5b-4d54-886b-e6de4f303d3b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.473270 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.473320 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/02ca2059-fc5b-4d54-886b-e6de4f303d3b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.473370 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.473458 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.473487 4632 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/02ca2059-fc5b-4d54-886b-e6de4f303d3b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.473535 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8nmf\" (UniqueName: \"kubernetes.io/projected/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-kube-api-access-z8nmf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.473566 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/02ca2059-fc5b-4d54-886b-e6de4f303d3b-config-data\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.473757 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.473814 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/02ca2059-fc5b-4d54-886b-e6de4f303d3b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.473939 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.474011 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/02ca2059-fc5b-4d54-886b-e6de4f303d3b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.474051 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/02ca2059-fc5b-4d54-886b-e6de4f303d3b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.474085 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.474143 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-tl8tq\" (UniqueName: \"kubernetes.io/projected/02ca2059-fc5b-4d54-886b-e6de4f303d3b-kube-api-access-tl8tq\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.474182 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.474257 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/02ca2059-fc5b-4d54-886b-e6de4f303d3b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.474284 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.578145 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.579025 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.579060 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/02ca2059-fc5b-4d54-886b-e6de4f303d3b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.579093 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8nmf\" (UniqueName: \"kubernetes.io/projected/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-kube-api-access-z8nmf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.579119 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/02ca2059-fc5b-4d54-886b-e6de4f303d3b-config-data\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.579157 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-server-0\" (UID: 
\"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.579179 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/02ca2059-fc5b-4d54-886b-e6de4f303d3b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.579211 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.579244 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/02ca2059-fc5b-4d54-886b-e6de4f303d3b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.579273 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/02ca2059-fc5b-4d54-886b-e6de4f303d3b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.579299 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.579327 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tl8tq\" (UniqueName: \"kubernetes.io/projected/02ca2059-fc5b-4d54-886b-e6de4f303d3b-kube-api-access-tl8tq\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.579349 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.579396 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/02ca2059-fc5b-4d54-886b-e6de4f303d3b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.579414 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.579474 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.579503 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.579533 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.579558 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/02ca2059-fc5b-4d54-886b-e6de4f303d3b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.579601 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/02ca2059-fc5b-4d54-886b-e6de4f303d3b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.579636 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.579662 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/02ca2059-fc5b-4d54-886b-e6de4f303d3b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.580007 4632 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.580149 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/02ca2059-fc5b-4d54-886b-e6de4f303d3b-config-data\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.580162 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/02ca2059-fc5b-4d54-886b-e6de4f303d3b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: 
\"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.580211 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/02ca2059-fc5b-4d54-886b-e6de4f303d3b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.580434 4632 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.580584 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.580945 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.581302 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/02ca2059-fc5b-4d54-886b-e6de4f303d3b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.581922 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/02ca2059-fc5b-4d54-886b-e6de4f303d3b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.585145 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/02ca2059-fc5b-4d54-886b-e6de4f303d3b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.586134 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.587560 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.588845 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" 
(UniqueName: \"kubernetes.io/configmap/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.590003 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.590569 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/02ca2059-fc5b-4d54-886b-e6de4f303d3b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.592225 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.594740 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.596670 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/02ca2059-fc5b-4d54-886b-e6de4f303d3b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.596753 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.596921 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8nmf\" (UniqueName: \"kubernetes.io/projected/6fa564e2-c2c4-41f0-aa84-1431574a0a4b-kube-api-access-z8nmf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.597464 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/02ca2059-fc5b-4d54-886b-e6de4f303d3b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.599697 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tl8tq\" (UniqueName: \"kubernetes.io/projected/02ca2059-fc5b-4d54-886b-e6de4f303d3b-kube-api-access-tl8tq\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc 
kubenswrapper[4632]: I1201 07:01:09.625968 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"6fa564e2-c2c4-41f0-aa84-1431574a0a4b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.629614 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-server-0\" (UID: \"02ca2059-fc5b-4d54-886b-e6de4f303d3b\") " pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.778209 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 01 07:01:09 crc kubenswrapper[4632]: I1201 07:01:09.813440 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:10 crc kubenswrapper[4632]: I1201 07:01:10.179938 4632 generic.go:334] "Generic (PLEG): container finished" podID="bc179d6f-c46d-4939-8bc9-101cd6b67eaa" containerID="f211616fb4f27c3ced4fe2d5b78a2c1b6dd9bb220476f52b15dc82367767c586" exitCode=0 Dec 01 07:01:10 crc kubenswrapper[4632]: I1201 07:01:10.180057 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw" event={"ID":"bc179d6f-c46d-4939-8bc9-101cd6b67eaa","Type":"ContainerDied","Data":"f211616fb4f27c3ced4fe2d5b78a2c1b6dd9bb220476f52b15dc82367767c586"} Dec 01 07:01:10 crc kubenswrapper[4632]: I1201 07:01:10.180460 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw" event={"ID":"bc179d6f-c46d-4939-8bc9-101cd6b67eaa","Type":"ContainerStarted","Data":"1ce44e5a21b89802657f50c769f1cd9436e33e2ad3250be8d03589b64a8e40f5"} Dec 01 07:01:10 crc kubenswrapper[4632]: I1201 07:01:10.222323 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 01 07:01:10 crc kubenswrapper[4632]: W1201 07:01:10.223996 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod02ca2059_fc5b_4d54_886b_e6de4f303d3b.slice/crio-39ef82b23e8dd7ed1f66bd17624a5cef388f1b314bf68a2f87b3ae885d9943b6 WatchSource:0}: Error finding container 39ef82b23e8dd7ed1f66bd17624a5cef388f1b314bf68a2f87b3ae885d9943b6: Status 404 returned error can't find the container with id 39ef82b23e8dd7ed1f66bd17624a5cef388f1b314bf68a2f87b3ae885d9943b6 Dec 01 07:01:10 crc kubenswrapper[4632]: I1201 07:01:10.304285 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 01 07:01:10 crc kubenswrapper[4632]: W1201 07:01:10.312756 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6fa564e2_c2c4_41f0_aa84_1431574a0a4b.slice/crio-9adf008d48fd3499a45a9f5cc1a07b0f5ef499d83c2038c0fc4511d6f1766f73 WatchSource:0}: Error finding container 9adf008d48fd3499a45a9f5cc1a07b0f5ef499d83c2038c0fc4511d6f1766f73: Status 404 returned error can't find the container with id 9adf008d48fd3499a45a9f5cc1a07b0f5ef499d83c2038c0fc4511d6f1766f73 Dec 01 07:01:10 crc kubenswrapper[4632]: I1201 07:01:10.765313 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67043517-303b-4159-a030-1192c39b98dd" path="/var/lib/kubelet/pods/67043517-303b-4159-a030-1192c39b98dd/volumes" Dec 01 07:01:10 crc 
kubenswrapper[4632]: I1201 07:01:10.766811 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff6e2f27-a2b5-4046-8e0e-dc495271a359" path="/var/lib/kubelet/pods/ff6e2f27-a2b5-4046-8e0e-dc495271a359/volumes" Dec 01 07:01:11 crc kubenswrapper[4632]: I1201 07:01:11.194258 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"6fa564e2-c2c4-41f0-aa84-1431574a0a4b","Type":"ContainerStarted","Data":"9adf008d48fd3499a45a9f5cc1a07b0f5ef499d83c2038c0fc4511d6f1766f73"} Dec 01 07:01:11 crc kubenswrapper[4632]: I1201 07:01:11.195792 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"02ca2059-fc5b-4d54-886b-e6de4f303d3b","Type":"ContainerStarted","Data":"39ef82b23e8dd7ed1f66bd17624a5cef388f1b314bf68a2f87b3ae885d9943b6"} Dec 01 07:01:11 crc kubenswrapper[4632]: I1201 07:01:11.198332 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw" event={"ID":"bc179d6f-c46d-4939-8bc9-101cd6b67eaa","Type":"ContainerStarted","Data":"2a156f969d95add92f5432211ab5745133c04ad796797e972f9e095f81ab1f54"} Dec 01 07:01:11 crc kubenswrapper[4632]: I1201 07:01:11.198513 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw" Dec 01 07:01:11 crc kubenswrapper[4632]: I1201 07:01:11.222402 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw" podStartSLOduration=3.222382781 podStartE2EDuration="3.222382781s" podCreationTimestamp="2025-12-01 07:01:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:01:11.219916829 +0000 UTC m=+1080.784929802" watchObservedRunningTime="2025-12-01 07:01:11.222382781 +0000 UTC m=+1080.787395754" Dec 01 07:01:12 crc kubenswrapper[4632]: I1201 07:01:12.208665 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"02ca2059-fc5b-4d54-886b-e6de4f303d3b","Type":"ContainerStarted","Data":"00b919fd9a2dea8c7dd18328108aa01e33213e8041e35923f34bf3f32abd97ae"} Dec 01 07:01:12 crc kubenswrapper[4632]: I1201 07:01:12.211262 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"6fa564e2-c2c4-41f0-aa84-1431574a0a4b","Type":"ContainerStarted","Data":"931925129e70472b942fd76b50fbf465ea3944235c6d7566822854d65a3a250a"} Dec 01 07:01:18 crc kubenswrapper[4632]: I1201 07:01:18.823574 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw" Dec 01 07:01:18 crc kubenswrapper[4632]: I1201 07:01:18.873699 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7476c47877-cskmq"] Dec 01 07:01:18 crc kubenswrapper[4632]: I1201 07:01:18.873954 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7476c47877-cskmq" podUID="09d6a1d1-b814-4262-9437-f140536ce50d" containerName="dnsmasq-dns" containerID="cri-o://bfd46054c67efe8a4f2837fd9ab612efc70a31f75726d2c418691015efef8b58" gracePeriod=10 Dec 01 07:01:18 crc kubenswrapper[4632]: I1201 07:01:18.994014 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6cd8587fff-2wtcz"] Dec 01 07:01:18 crc kubenswrapper[4632]: I1201 07:01:18.996716 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.018487 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6cd8587fff-2wtcz"] Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.077064 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/940ceb01-c88a-4012-bd18-d87ef90d7549-ovsdbserver-sb\") pod \"dnsmasq-dns-6cd8587fff-2wtcz\" (UID: \"940ceb01-c88a-4012-bd18-d87ef90d7549\") " pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.077295 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/940ceb01-c88a-4012-bd18-d87ef90d7549-dns-swift-storage-0\") pod \"dnsmasq-dns-6cd8587fff-2wtcz\" (UID: \"940ceb01-c88a-4012-bd18-d87ef90d7549\") " pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.077444 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/940ceb01-c88a-4012-bd18-d87ef90d7549-config\") pod \"dnsmasq-dns-6cd8587fff-2wtcz\" (UID: \"940ceb01-c88a-4012-bd18-d87ef90d7549\") " pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.077546 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/940ceb01-c88a-4012-bd18-d87ef90d7549-ovsdbserver-nb\") pod \"dnsmasq-dns-6cd8587fff-2wtcz\" (UID: \"940ceb01-c88a-4012-bd18-d87ef90d7549\") " pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.077679 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/940ceb01-c88a-4012-bd18-d87ef90d7549-dns-svc\") pod \"dnsmasq-dns-6cd8587fff-2wtcz\" (UID: \"940ceb01-c88a-4012-bd18-d87ef90d7549\") " pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.077783 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/940ceb01-c88a-4012-bd18-d87ef90d7549-openstack-edpm-ipam\") pod \"dnsmasq-dns-6cd8587fff-2wtcz\" (UID: \"940ceb01-c88a-4012-bd18-d87ef90d7549\") " pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.077870 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcxbr\" (UniqueName: \"kubernetes.io/projected/940ceb01-c88a-4012-bd18-d87ef90d7549-kube-api-access-bcxbr\") pod \"dnsmasq-dns-6cd8587fff-2wtcz\" (UID: \"940ceb01-c88a-4012-bd18-d87ef90d7549\") " pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.180266 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/940ceb01-c88a-4012-bd18-d87ef90d7549-dns-svc\") pod \"dnsmasq-dns-6cd8587fff-2wtcz\" (UID: \"940ceb01-c88a-4012-bd18-d87ef90d7549\") " pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.180334 4632 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/940ceb01-c88a-4012-bd18-d87ef90d7549-openstack-edpm-ipam\") pod \"dnsmasq-dns-6cd8587fff-2wtcz\" (UID: \"940ceb01-c88a-4012-bd18-d87ef90d7549\") " pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.180379 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcxbr\" (UniqueName: \"kubernetes.io/projected/940ceb01-c88a-4012-bd18-d87ef90d7549-kube-api-access-bcxbr\") pod \"dnsmasq-dns-6cd8587fff-2wtcz\" (UID: \"940ceb01-c88a-4012-bd18-d87ef90d7549\") " pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.180405 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/940ceb01-c88a-4012-bd18-d87ef90d7549-ovsdbserver-sb\") pod \"dnsmasq-dns-6cd8587fff-2wtcz\" (UID: \"940ceb01-c88a-4012-bd18-d87ef90d7549\") " pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.180457 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/940ceb01-c88a-4012-bd18-d87ef90d7549-dns-swift-storage-0\") pod \"dnsmasq-dns-6cd8587fff-2wtcz\" (UID: \"940ceb01-c88a-4012-bd18-d87ef90d7549\") " pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.180500 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/940ceb01-c88a-4012-bd18-d87ef90d7549-config\") pod \"dnsmasq-dns-6cd8587fff-2wtcz\" (UID: \"940ceb01-c88a-4012-bd18-d87ef90d7549\") " pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.180518 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/940ceb01-c88a-4012-bd18-d87ef90d7549-ovsdbserver-nb\") pod \"dnsmasq-dns-6cd8587fff-2wtcz\" (UID: \"940ceb01-c88a-4012-bd18-d87ef90d7549\") " pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.182027 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/940ceb01-c88a-4012-bd18-d87ef90d7549-dns-swift-storage-0\") pod \"dnsmasq-dns-6cd8587fff-2wtcz\" (UID: \"940ceb01-c88a-4012-bd18-d87ef90d7549\") " pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.182395 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/940ceb01-c88a-4012-bd18-d87ef90d7549-config\") pod \"dnsmasq-dns-6cd8587fff-2wtcz\" (UID: \"940ceb01-c88a-4012-bd18-d87ef90d7549\") " pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.182537 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/940ceb01-c88a-4012-bd18-d87ef90d7549-openstack-edpm-ipam\") pod \"dnsmasq-dns-6cd8587fff-2wtcz\" (UID: \"940ceb01-c88a-4012-bd18-d87ef90d7549\") " pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.183562 4632 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/940ceb01-c88a-4012-bd18-d87ef90d7549-dns-svc\") pod \"dnsmasq-dns-6cd8587fff-2wtcz\" (UID: \"940ceb01-c88a-4012-bd18-d87ef90d7549\") " pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.183679 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/940ceb01-c88a-4012-bd18-d87ef90d7549-ovsdbserver-nb\") pod \"dnsmasq-dns-6cd8587fff-2wtcz\" (UID: \"940ceb01-c88a-4012-bd18-d87ef90d7549\") " pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.186346 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/940ceb01-c88a-4012-bd18-d87ef90d7549-ovsdbserver-sb\") pod \"dnsmasq-dns-6cd8587fff-2wtcz\" (UID: \"940ceb01-c88a-4012-bd18-d87ef90d7549\") " pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.205316 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcxbr\" (UniqueName: \"kubernetes.io/projected/940ceb01-c88a-4012-bd18-d87ef90d7549-kube-api-access-bcxbr\") pod \"dnsmasq-dns-6cd8587fff-2wtcz\" (UID: \"940ceb01-c88a-4012-bd18-d87ef90d7549\") " pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.278840 4632 generic.go:334] "Generic (PLEG): container finished" podID="09d6a1d1-b814-4262-9437-f140536ce50d" containerID="bfd46054c67efe8a4f2837fd9ab612efc70a31f75726d2c418691015efef8b58" exitCode=0 Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.278889 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7476c47877-cskmq" event={"ID":"09d6a1d1-b814-4262-9437-f140536ce50d","Type":"ContainerDied","Data":"bfd46054c67efe8a4f2837fd9ab612efc70a31f75726d2c418691015efef8b58"} Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.278920 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7476c47877-cskmq" event={"ID":"09d6a1d1-b814-4262-9437-f140536ce50d","Type":"ContainerDied","Data":"d9c2290b55fde864bc4d4bab7e4a73d923593679890c488b124428c53f766364"} Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.278931 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d9c2290b55fde864bc4d4bab7e4a73d923593679890c488b124428c53f766364" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.308697 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7476c47877-cskmq" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.362833 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.383168 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ckkg8\" (UniqueName: \"kubernetes.io/projected/09d6a1d1-b814-4262-9437-f140536ce50d-kube-api-access-ckkg8\") pod \"09d6a1d1-b814-4262-9437-f140536ce50d\" (UID: \"09d6a1d1-b814-4262-9437-f140536ce50d\") " Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.383548 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-config\") pod \"09d6a1d1-b814-4262-9437-f140536ce50d\" (UID: \"09d6a1d1-b814-4262-9437-f140536ce50d\") " Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.383579 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-ovsdbserver-nb\") pod \"09d6a1d1-b814-4262-9437-f140536ce50d\" (UID: \"09d6a1d1-b814-4262-9437-f140536ce50d\") " Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.383681 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-dns-svc\") pod \"09d6a1d1-b814-4262-9437-f140536ce50d\" (UID: \"09d6a1d1-b814-4262-9437-f140536ce50d\") " Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.383868 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-dns-swift-storage-0\") pod \"09d6a1d1-b814-4262-9437-f140536ce50d\" (UID: \"09d6a1d1-b814-4262-9437-f140536ce50d\") " Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.383918 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-ovsdbserver-sb\") pod \"09d6a1d1-b814-4262-9437-f140536ce50d\" (UID: \"09d6a1d1-b814-4262-9437-f140536ce50d\") " Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.392913 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09d6a1d1-b814-4262-9437-f140536ce50d-kube-api-access-ckkg8" (OuterVolumeSpecName: "kube-api-access-ckkg8") pod "09d6a1d1-b814-4262-9437-f140536ce50d" (UID: "09d6a1d1-b814-4262-9437-f140536ce50d"). InnerVolumeSpecName "kube-api-access-ckkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.436223 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-config" (OuterVolumeSpecName: "config") pod "09d6a1d1-b814-4262-9437-f140536ce50d" (UID: "09d6a1d1-b814-4262-9437-f140536ce50d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.441564 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "09d6a1d1-b814-4262-9437-f140536ce50d" (UID: "09d6a1d1-b814-4262-9437-f140536ce50d"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.449090 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "09d6a1d1-b814-4262-9437-f140536ce50d" (UID: "09d6a1d1-b814-4262-9437-f140536ce50d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.459628 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "09d6a1d1-b814-4262-9437-f140536ce50d" (UID: "09d6a1d1-b814-4262-9437-f140536ce50d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.465153 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "09d6a1d1-b814-4262-9437-f140536ce50d" (UID: "09d6a1d1-b814-4262-9437-f140536ce50d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.487257 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ckkg8\" (UniqueName: \"kubernetes.io/projected/09d6a1d1-b814-4262-9437-f140536ce50d-kube-api-access-ckkg8\") on node \"crc\" DevicePath \"\"" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.487286 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.487300 4632 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.487310 4632 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.487320 4632 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.487329 4632 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/09d6a1d1-b814-4262-9437-f140536ce50d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.498068 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.498120 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:01:19 crc kubenswrapper[4632]: I1201 07:01:19.786193 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6cd8587fff-2wtcz"] Dec 01 07:01:20 crc kubenswrapper[4632]: I1201 07:01:20.292744 4632 generic.go:334] "Generic (PLEG): container finished" podID="940ceb01-c88a-4012-bd18-d87ef90d7549" containerID="374eda8a42d9334882b1e863a3eb585d652d77696cd9dc9ab79c13acf73fa45b" exitCode=0 Dec 01 07:01:20 crc kubenswrapper[4632]: I1201 07:01:20.292877 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" event={"ID":"940ceb01-c88a-4012-bd18-d87ef90d7549","Type":"ContainerDied","Data":"374eda8a42d9334882b1e863a3eb585d652d77696cd9dc9ab79c13acf73fa45b"} Dec 01 07:01:20 crc kubenswrapper[4632]: I1201 07:01:20.293271 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" event={"ID":"940ceb01-c88a-4012-bd18-d87ef90d7549","Type":"ContainerStarted","Data":"32631e685c9f7d8eb4c17411d9afbcc6d3d3dcfbbbc31f540eaeae0552900807"} Dec 01 07:01:20 crc kubenswrapper[4632]: I1201 07:01:20.293278 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7476c47877-cskmq" Dec 01 07:01:20 crc kubenswrapper[4632]: I1201 07:01:20.472551 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7476c47877-cskmq"] Dec 01 07:01:20 crc kubenswrapper[4632]: I1201 07:01:20.496205 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7476c47877-cskmq"] Dec 01 07:01:20 crc kubenswrapper[4632]: I1201 07:01:20.763919 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09d6a1d1-b814-4262-9437-f140536ce50d" path="/var/lib/kubelet/pods/09d6a1d1-b814-4262-9437-f140536ce50d/volumes" Dec 01 07:01:21 crc kubenswrapper[4632]: I1201 07:01:21.310088 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" event={"ID":"940ceb01-c88a-4012-bd18-d87ef90d7549","Type":"ContainerStarted","Data":"071d0be9306423e54874a079e2fce56f061da37815b968b12c11a6806ac1fd27"} Dec 01 07:01:21 crc kubenswrapper[4632]: I1201 07:01:21.310945 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" Dec 01 07:01:21 crc kubenswrapper[4632]: I1201 07:01:21.338216 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" podStartSLOduration=3.338196831 podStartE2EDuration="3.338196831s" podCreationTimestamp="2025-12-01 07:01:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:01:21.330432073 +0000 UTC m=+1090.895445046" watchObservedRunningTime="2025-12-01 07:01:21.338196831 +0000 UTC m=+1090.903209803" Dec 01 07:01:29 crc kubenswrapper[4632]: I1201 07:01:29.364600 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6cd8587fff-2wtcz" Dec 01 07:01:29 crc kubenswrapper[4632]: I1201 07:01:29.422715 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fc4f69dd7-f5mmw"] Dec 01 07:01:29 crc kubenswrapper[4632]: I1201 07:01:29.422984 4632 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw" podUID="bc179d6f-c46d-4939-8bc9-101cd6b67eaa" containerName="dnsmasq-dns" containerID="cri-o://2a156f969d95add92f5432211ab5745133c04ad796797e972f9e095f81ab1f54" gracePeriod=10 Dec 01 07:01:29 crc kubenswrapper[4632]: I1201 07:01:29.851171 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw" Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.010940 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-openstack-edpm-ipam\") pod \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") " Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.011007 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-dns-svc\") pod \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") " Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.011079 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t8nh9\" (UniqueName: \"kubernetes.io/projected/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-kube-api-access-t8nh9\") pod \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") " Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.011137 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-dns-swift-storage-0\") pod \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") " Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.011187 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-config\") pod \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") " Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.011211 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-ovsdbserver-sb\") pod \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") " Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.011337 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-ovsdbserver-nb\") pod \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\" (UID: \"bc179d6f-c46d-4939-8bc9-101cd6b67eaa\") " Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.016624 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-kube-api-access-t8nh9" (OuterVolumeSpecName: "kube-api-access-t8nh9") pod "bc179d6f-c46d-4939-8bc9-101cd6b67eaa" (UID: "bc179d6f-c46d-4939-8bc9-101cd6b67eaa"). InnerVolumeSpecName "kube-api-access-t8nh9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.049080 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bc179d6f-c46d-4939-8bc9-101cd6b67eaa" (UID: "bc179d6f-c46d-4939-8bc9-101cd6b67eaa"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.050634 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "bc179d6f-c46d-4939-8bc9-101cd6b67eaa" (UID: "bc179d6f-c46d-4939-8bc9-101cd6b67eaa"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.055685 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bc179d6f-c46d-4939-8bc9-101cd6b67eaa" (UID: "bc179d6f-c46d-4939-8bc9-101cd6b67eaa"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.057678 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bc179d6f-c46d-4939-8bc9-101cd6b67eaa" (UID: "bc179d6f-c46d-4939-8bc9-101cd6b67eaa"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.061011 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "bc179d6f-c46d-4939-8bc9-101cd6b67eaa" (UID: "bc179d6f-c46d-4939-8bc9-101cd6b67eaa"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.063863 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-config" (OuterVolumeSpecName: "config") pod "bc179d6f-c46d-4939-8bc9-101cd6b67eaa" (UID: "bc179d6f-c46d-4939-8bc9-101cd6b67eaa"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.115086 4632 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.115120 4632 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.115130 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t8nh9\" (UniqueName: \"kubernetes.io/projected/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-kube-api-access-t8nh9\") on node \"crc\" DevicePath \"\"" Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.115147 4632 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.115159 4632 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.115170 4632 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.115180 4632 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bc179d6f-c46d-4939-8bc9-101cd6b67eaa-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.410730 4632 generic.go:334] "Generic (PLEG): container finished" podID="bc179d6f-c46d-4939-8bc9-101cd6b67eaa" containerID="2a156f969d95add92f5432211ab5745133c04ad796797e972f9e095f81ab1f54" exitCode=0 Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.410801 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw" event={"ID":"bc179d6f-c46d-4939-8bc9-101cd6b67eaa","Type":"ContainerDied","Data":"2a156f969d95add92f5432211ab5745133c04ad796797e972f9e095f81ab1f54"} Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.410845 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw" event={"ID":"bc179d6f-c46d-4939-8bc9-101cd6b67eaa","Type":"ContainerDied","Data":"1ce44e5a21b89802657f50c769f1cd9436e33e2ad3250be8d03589b64a8e40f5"} Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.410871 4632 scope.go:117] "RemoveContainer" containerID="2a156f969d95add92f5432211ab5745133c04ad796797e972f9e095f81ab1f54" Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.410927 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-fc4f69dd7-f5mmw" Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.438305 4632 scope.go:117] "RemoveContainer" containerID="f211616fb4f27c3ced4fe2d5b78a2c1b6dd9bb220476f52b15dc82367767c586" Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.440738 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fc4f69dd7-f5mmw"] Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.448998 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-fc4f69dd7-f5mmw"] Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.479290 4632 scope.go:117] "RemoveContainer" containerID="2a156f969d95add92f5432211ab5745133c04ad796797e972f9e095f81ab1f54" Dec 01 07:01:30 crc kubenswrapper[4632]: E1201 07:01:30.479696 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a156f969d95add92f5432211ab5745133c04ad796797e972f9e095f81ab1f54\": container with ID starting with 2a156f969d95add92f5432211ab5745133c04ad796797e972f9e095f81ab1f54 not found: ID does not exist" containerID="2a156f969d95add92f5432211ab5745133c04ad796797e972f9e095f81ab1f54" Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.479740 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a156f969d95add92f5432211ab5745133c04ad796797e972f9e095f81ab1f54"} err="failed to get container status \"2a156f969d95add92f5432211ab5745133c04ad796797e972f9e095f81ab1f54\": rpc error: code = NotFound desc = could not find container \"2a156f969d95add92f5432211ab5745133c04ad796797e972f9e095f81ab1f54\": container with ID starting with 2a156f969d95add92f5432211ab5745133c04ad796797e972f9e095f81ab1f54 not found: ID does not exist" Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.479770 4632 scope.go:117] "RemoveContainer" containerID="f211616fb4f27c3ced4fe2d5b78a2c1b6dd9bb220476f52b15dc82367767c586" Dec 01 07:01:30 crc kubenswrapper[4632]: E1201 07:01:30.480400 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f211616fb4f27c3ced4fe2d5b78a2c1b6dd9bb220476f52b15dc82367767c586\": container with ID starting with f211616fb4f27c3ced4fe2d5b78a2c1b6dd9bb220476f52b15dc82367767c586 not found: ID does not exist" containerID="f211616fb4f27c3ced4fe2d5b78a2c1b6dd9bb220476f52b15dc82367767c586" Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.480460 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f211616fb4f27c3ced4fe2d5b78a2c1b6dd9bb220476f52b15dc82367767c586"} err="failed to get container status \"f211616fb4f27c3ced4fe2d5b78a2c1b6dd9bb220476f52b15dc82367767c586\": rpc error: code = NotFound desc = could not find container \"f211616fb4f27c3ced4fe2d5b78a2c1b6dd9bb220476f52b15dc82367767c586\": container with ID starting with f211616fb4f27c3ced4fe2d5b78a2c1b6dd9bb220476f52b15dc82367767c586 not found: ID does not exist" Dec 01 07:01:30 crc kubenswrapper[4632]: I1201 07:01:30.762995 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc179d6f-c46d-4939-8bc9-101cd6b67eaa" path="/var/lib/kubelet/pods/bc179d6f-c46d-4939-8bc9-101cd6b67eaa/volumes" Dec 01 07:01:42 crc kubenswrapper[4632]: I1201 07:01:42.579639 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd"] Dec 01 07:01:42 crc kubenswrapper[4632]: E1201 07:01:42.580624 4632 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09d6a1d1-b814-4262-9437-f140536ce50d" containerName="dnsmasq-dns" Dec 01 07:01:42 crc kubenswrapper[4632]: I1201 07:01:42.580638 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="09d6a1d1-b814-4262-9437-f140536ce50d" containerName="dnsmasq-dns" Dec 01 07:01:42 crc kubenswrapper[4632]: E1201 07:01:42.580665 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc179d6f-c46d-4939-8bc9-101cd6b67eaa" containerName="init" Dec 01 07:01:42 crc kubenswrapper[4632]: I1201 07:01:42.580671 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc179d6f-c46d-4939-8bc9-101cd6b67eaa" containerName="init" Dec 01 07:01:42 crc kubenswrapper[4632]: E1201 07:01:42.580691 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc179d6f-c46d-4939-8bc9-101cd6b67eaa" containerName="dnsmasq-dns" Dec 01 07:01:42 crc kubenswrapper[4632]: I1201 07:01:42.580696 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc179d6f-c46d-4939-8bc9-101cd6b67eaa" containerName="dnsmasq-dns" Dec 01 07:01:42 crc kubenswrapper[4632]: E1201 07:01:42.580710 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09d6a1d1-b814-4262-9437-f140536ce50d" containerName="init" Dec 01 07:01:42 crc kubenswrapper[4632]: I1201 07:01:42.580717 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="09d6a1d1-b814-4262-9437-f140536ce50d" containerName="init" Dec 01 07:01:42 crc kubenswrapper[4632]: I1201 07:01:42.580918 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc179d6f-c46d-4939-8bc9-101cd6b67eaa" containerName="dnsmasq-dns" Dec 01 07:01:42 crc kubenswrapper[4632]: I1201 07:01:42.580941 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="09d6a1d1-b814-4262-9437-f140536ce50d" containerName="dnsmasq-dns" Dec 01 07:01:42 crc kubenswrapper[4632]: I1201 07:01:42.581572 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd" Dec 01 07:01:42 crc kubenswrapper[4632]: I1201 07:01:42.584197 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l6tzc" Dec 01 07:01:42 crc kubenswrapper[4632]: I1201 07:01:42.584333 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 07:01:42 crc kubenswrapper[4632]: I1201 07:01:42.585249 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c57a7c91-453f-4fce-8410-abffb3ffe651-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd\" (UID: \"c57a7c91-453f-4fce-8410-abffb3ffe651\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd" Dec 01 07:01:42 crc kubenswrapper[4632]: I1201 07:01:42.585327 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 07:01:42 crc kubenswrapper[4632]: I1201 07:01:42.585336 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c57a7c91-453f-4fce-8410-abffb3ffe651-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd\" (UID: \"c57a7c91-453f-4fce-8410-abffb3ffe651\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd" Dec 01 07:01:42 crc kubenswrapper[4632]: I1201 07:01:42.585396 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zlnp\" (UniqueName: \"kubernetes.io/projected/c57a7c91-453f-4fce-8410-abffb3ffe651-kube-api-access-4zlnp\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd\" (UID: \"c57a7c91-453f-4fce-8410-abffb3ffe651\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd" Dec 01 07:01:42 crc kubenswrapper[4632]: I1201 07:01:42.585487 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c57a7c91-453f-4fce-8410-abffb3ffe651-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd\" (UID: \"c57a7c91-453f-4fce-8410-abffb3ffe651\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd" Dec 01 07:01:42 crc kubenswrapper[4632]: I1201 07:01:42.586053 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 07:01:42 crc kubenswrapper[4632]: I1201 07:01:42.592985 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd"] Dec 01 07:01:42 crc kubenswrapper[4632]: I1201 07:01:42.687388 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c57a7c91-453f-4fce-8410-abffb3ffe651-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd\" (UID: \"c57a7c91-453f-4fce-8410-abffb3ffe651\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd" Dec 01 07:01:42 crc kubenswrapper[4632]: I1201 07:01:42.687534 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c57a7c91-453f-4fce-8410-abffb3ffe651-inventory\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd\" (UID: \"c57a7c91-453f-4fce-8410-abffb3ffe651\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd" Dec 01 07:01:42 crc kubenswrapper[4632]: I1201 07:01:42.687583 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zlnp\" (UniqueName: \"kubernetes.io/projected/c57a7c91-453f-4fce-8410-abffb3ffe651-kube-api-access-4zlnp\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd\" (UID: \"c57a7c91-453f-4fce-8410-abffb3ffe651\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd" Dec 01 07:01:42 crc kubenswrapper[4632]: I1201 07:01:42.687688 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c57a7c91-453f-4fce-8410-abffb3ffe651-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd\" (UID: \"c57a7c91-453f-4fce-8410-abffb3ffe651\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd" Dec 01 07:01:42 crc kubenswrapper[4632]: I1201 07:01:42.695227 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c57a7c91-453f-4fce-8410-abffb3ffe651-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd\" (UID: \"c57a7c91-453f-4fce-8410-abffb3ffe651\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd" Dec 01 07:01:42 crc kubenswrapper[4632]: I1201 07:01:42.696003 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c57a7c91-453f-4fce-8410-abffb3ffe651-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd\" (UID: \"c57a7c91-453f-4fce-8410-abffb3ffe651\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd" Dec 01 07:01:42 crc kubenswrapper[4632]: I1201 07:01:42.696060 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c57a7c91-453f-4fce-8410-abffb3ffe651-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd\" (UID: \"c57a7c91-453f-4fce-8410-abffb3ffe651\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd" Dec 01 07:01:42 crc kubenswrapper[4632]: I1201 07:01:42.703194 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zlnp\" (UniqueName: \"kubernetes.io/projected/c57a7c91-453f-4fce-8410-abffb3ffe651-kube-api-access-4zlnp\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd\" (UID: \"c57a7c91-453f-4fce-8410-abffb3ffe651\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd" Dec 01 07:01:42 crc kubenswrapper[4632]: I1201 07:01:42.896577 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd" Dec 01 07:01:43 crc kubenswrapper[4632]: I1201 07:01:43.388874 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd"] Dec 01 07:01:43 crc kubenswrapper[4632]: I1201 07:01:43.390978 4632 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 07:01:43 crc kubenswrapper[4632]: I1201 07:01:43.539286 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd" event={"ID":"c57a7c91-453f-4fce-8410-abffb3ffe651","Type":"ContainerStarted","Data":"11554d357706793542e233a812c20bba4e7ca78141b3bd629b264fbaeb3de621"} Dec 01 07:01:44 crc kubenswrapper[4632]: I1201 07:01:44.553153 4632 generic.go:334] "Generic (PLEG): container finished" podID="02ca2059-fc5b-4d54-886b-e6de4f303d3b" containerID="00b919fd9a2dea8c7dd18328108aa01e33213e8041e35923f34bf3f32abd97ae" exitCode=0 Dec 01 07:01:44 crc kubenswrapper[4632]: I1201 07:01:44.553237 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"02ca2059-fc5b-4d54-886b-e6de4f303d3b","Type":"ContainerDied","Data":"00b919fd9a2dea8c7dd18328108aa01e33213e8041e35923f34bf3f32abd97ae"} Dec 01 07:01:44 crc kubenswrapper[4632]: I1201 07:01:44.558031 4632 generic.go:334] "Generic (PLEG): container finished" podID="6fa564e2-c2c4-41f0-aa84-1431574a0a4b" containerID="931925129e70472b942fd76b50fbf465ea3944235c6d7566822854d65a3a250a" exitCode=0 Dec 01 07:01:44 crc kubenswrapper[4632]: I1201 07:01:44.558134 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"6fa564e2-c2c4-41f0-aa84-1431574a0a4b","Type":"ContainerDied","Data":"931925129e70472b942fd76b50fbf465ea3944235c6d7566822854d65a3a250a"} Dec 01 07:01:45 crc kubenswrapper[4632]: I1201 07:01:45.601956 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"6fa564e2-c2c4-41f0-aa84-1431574a0a4b","Type":"ContainerStarted","Data":"30243cc8dae22f890bf02220dd96fcde433b4de7ff39fe0f8aa14ddf499f54b8"} Dec 01 07:01:45 crc kubenswrapper[4632]: I1201 07:01:45.602486 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:01:45 crc kubenswrapper[4632]: I1201 07:01:45.606258 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"02ca2059-fc5b-4d54-886b-e6de4f303d3b","Type":"ContainerStarted","Data":"3cdfafde24f011bff5e2d543b461cf6224cb242769625d72c9c90b8298bd0284"} Dec 01 07:01:45 crc kubenswrapper[4632]: I1201 07:01:45.606456 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 01 07:01:45 crc kubenswrapper[4632]: I1201 07:01:45.628834 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.628818395 podStartE2EDuration="36.628818395s" podCreationTimestamp="2025-12-01 07:01:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:01:45.625727203 +0000 UTC m=+1115.190740175" watchObservedRunningTime="2025-12-01 07:01:45.628818395 +0000 UTC m=+1115.193831367" Dec 01 07:01:45 crc kubenswrapper[4632]: I1201 07:01:45.645167 4632 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.645133584999996 podStartE2EDuration="36.645133585s" podCreationTimestamp="2025-12-01 07:01:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:01:45.644082091 +0000 UTC m=+1115.209095064" watchObservedRunningTime="2025-12-01 07:01:45.645133585 +0000 UTC m=+1115.210146558" Dec 01 07:01:49 crc kubenswrapper[4632]: I1201 07:01:49.498439 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:01:49 crc kubenswrapper[4632]: I1201 07:01:49.498826 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:01:51 crc kubenswrapper[4632]: I1201 07:01:51.680328 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd" event={"ID":"c57a7c91-453f-4fce-8410-abffb3ffe651","Type":"ContainerStarted","Data":"7dd967faf5d52cf59d2596c987da572600a2e56f794d58dbb6b55ca53bb3f0e3"} Dec 01 07:01:51 crc kubenswrapper[4632]: I1201 07:01:51.706084 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd" podStartSLOduration=1.80414622 podStartE2EDuration="9.706065572s" podCreationTimestamp="2025-12-01 07:01:42 +0000 UTC" firstStartedPulling="2025-12-01 07:01:43.390743333 +0000 UTC m=+1112.955756306" lastFinishedPulling="2025-12-01 07:01:51.292662685 +0000 UTC m=+1120.857675658" observedRunningTime="2025-12-01 07:01:51.700809576 +0000 UTC m=+1121.265822549" watchObservedRunningTime="2025-12-01 07:01:51.706065572 +0000 UTC m=+1121.271078546" Dec 01 07:01:59 crc kubenswrapper[4632]: I1201 07:01:59.782568 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 01 07:01:59 crc kubenswrapper[4632]: I1201 07:01:59.819543 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 01 07:02:02 crc kubenswrapper[4632]: I1201 07:02:02.781187 4632 generic.go:334] "Generic (PLEG): container finished" podID="c57a7c91-453f-4fce-8410-abffb3ffe651" containerID="7dd967faf5d52cf59d2596c987da572600a2e56f794d58dbb6b55ca53bb3f0e3" exitCode=0 Dec 01 07:02:02 crc kubenswrapper[4632]: I1201 07:02:02.781280 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd" event={"ID":"c57a7c91-453f-4fce-8410-abffb3ffe651","Type":"ContainerDied","Data":"7dd967faf5d52cf59d2596c987da572600a2e56f794d58dbb6b55ca53bb3f0e3"} Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.146248 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd" Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.283279 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c57a7c91-453f-4fce-8410-abffb3ffe651-inventory\") pod \"c57a7c91-453f-4fce-8410-abffb3ffe651\" (UID: \"c57a7c91-453f-4fce-8410-abffb3ffe651\") " Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.283367 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c57a7c91-453f-4fce-8410-abffb3ffe651-ssh-key\") pod \"c57a7c91-453f-4fce-8410-abffb3ffe651\" (UID: \"c57a7c91-453f-4fce-8410-abffb3ffe651\") " Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.283461 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c57a7c91-453f-4fce-8410-abffb3ffe651-repo-setup-combined-ca-bundle\") pod \"c57a7c91-453f-4fce-8410-abffb3ffe651\" (UID: \"c57a7c91-453f-4fce-8410-abffb3ffe651\") " Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.283505 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zlnp\" (UniqueName: \"kubernetes.io/projected/c57a7c91-453f-4fce-8410-abffb3ffe651-kube-api-access-4zlnp\") pod \"c57a7c91-453f-4fce-8410-abffb3ffe651\" (UID: \"c57a7c91-453f-4fce-8410-abffb3ffe651\") " Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.294612 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c57a7c91-453f-4fce-8410-abffb3ffe651-kube-api-access-4zlnp" (OuterVolumeSpecName: "kube-api-access-4zlnp") pod "c57a7c91-453f-4fce-8410-abffb3ffe651" (UID: "c57a7c91-453f-4fce-8410-abffb3ffe651"). InnerVolumeSpecName "kube-api-access-4zlnp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.294873 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c57a7c91-453f-4fce-8410-abffb3ffe651-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "c57a7c91-453f-4fce-8410-abffb3ffe651" (UID: "c57a7c91-453f-4fce-8410-abffb3ffe651"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.306815 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c57a7c91-453f-4fce-8410-abffb3ffe651-inventory" (OuterVolumeSpecName: "inventory") pod "c57a7c91-453f-4fce-8410-abffb3ffe651" (UID: "c57a7c91-453f-4fce-8410-abffb3ffe651"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.308638 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c57a7c91-453f-4fce-8410-abffb3ffe651-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c57a7c91-453f-4fce-8410-abffb3ffe651" (UID: "c57a7c91-453f-4fce-8410-abffb3ffe651"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.386341 4632 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c57a7c91-453f-4fce-8410-abffb3ffe651-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.386406 4632 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c57a7c91-453f-4fce-8410-abffb3ffe651-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.386423 4632 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c57a7c91-453f-4fce-8410-abffb3ffe651-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.386444 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zlnp\" (UniqueName: \"kubernetes.io/projected/c57a7c91-453f-4fce-8410-abffb3ffe651-kube-api-access-4zlnp\") on node \"crc\" DevicePath \"\"" Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.799905 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd" event={"ID":"c57a7c91-453f-4fce-8410-abffb3ffe651","Type":"ContainerDied","Data":"11554d357706793542e233a812c20bba4e7ca78141b3bd629b264fbaeb3de621"} Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.799963 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="11554d357706793542e233a812c20bba4e7ca78141b3bd629b264fbaeb3de621" Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.800040 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd" Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.862644 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-fx2pb"] Dec 01 07:02:04 crc kubenswrapper[4632]: E1201 07:02:04.863206 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c57a7c91-453f-4fce-8410-abffb3ffe651" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.863232 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="c57a7c91-453f-4fce-8410-abffb3ffe651" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.863519 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="c57a7c91-453f-4fce-8410-abffb3ffe651" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.864240 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-fx2pb" Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.865971 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.866208 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l6tzc" Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.866457 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.868028 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.872132 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-fx2pb"] Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.894729 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/29eb2b86-6594-44a0-a146-073da23a9341-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-fx2pb\" (UID: \"29eb2b86-6594-44a0-a146-073da23a9341\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-fx2pb" Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.894812 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwmmn\" (UniqueName: \"kubernetes.io/projected/29eb2b86-6594-44a0-a146-073da23a9341-kube-api-access-kwmmn\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-fx2pb\" (UID: \"29eb2b86-6594-44a0-a146-073da23a9341\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-fx2pb" Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.894914 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/29eb2b86-6594-44a0-a146-073da23a9341-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-fx2pb\" (UID: \"29eb2b86-6594-44a0-a146-073da23a9341\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-fx2pb" Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.996895 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwmmn\" (UniqueName: \"kubernetes.io/projected/29eb2b86-6594-44a0-a146-073da23a9341-kube-api-access-kwmmn\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-fx2pb\" (UID: \"29eb2b86-6594-44a0-a146-073da23a9341\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-fx2pb" Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.996977 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/29eb2b86-6594-44a0-a146-073da23a9341-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-fx2pb\" (UID: \"29eb2b86-6594-44a0-a146-073da23a9341\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-fx2pb" Dec 01 07:02:04 crc kubenswrapper[4632]: I1201 07:02:04.997080 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/29eb2b86-6594-44a0-a146-073da23a9341-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-fx2pb\" (UID: \"29eb2b86-6594-44a0-a146-073da23a9341\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-fx2pb" Dec 01 07:02:05 crc kubenswrapper[4632]: I1201 07:02:05.002704 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/29eb2b86-6594-44a0-a146-073da23a9341-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-fx2pb\" (UID: \"29eb2b86-6594-44a0-a146-073da23a9341\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-fx2pb" Dec 01 07:02:05 crc kubenswrapper[4632]: I1201 07:02:05.003155 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/29eb2b86-6594-44a0-a146-073da23a9341-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-fx2pb\" (UID: \"29eb2b86-6594-44a0-a146-073da23a9341\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-fx2pb" Dec 01 07:02:05 crc kubenswrapper[4632]: I1201 07:02:05.013682 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwmmn\" (UniqueName: \"kubernetes.io/projected/29eb2b86-6594-44a0-a146-073da23a9341-kube-api-access-kwmmn\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-fx2pb\" (UID: \"29eb2b86-6594-44a0-a146-073da23a9341\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-fx2pb" Dec 01 07:02:05 crc kubenswrapper[4632]: I1201 07:02:05.181347 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-fx2pb" Dec 01 07:02:05 crc kubenswrapper[4632]: I1201 07:02:05.689628 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-fx2pb"] Dec 01 07:02:05 crc kubenswrapper[4632]: I1201 07:02:05.811731 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-fx2pb" event={"ID":"29eb2b86-6594-44a0-a146-073da23a9341","Type":"ContainerStarted","Data":"1218002a9ec9e1b3f9519baaa21e7561a3bd5a256c126b2d3aacb354c0970363"} Dec 01 07:02:06 crc kubenswrapper[4632]: I1201 07:02:06.826073 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-fx2pb" event={"ID":"29eb2b86-6594-44a0-a146-073da23a9341","Type":"ContainerStarted","Data":"ebf2367c6c6ee0de80cdf5586bcec718a4d7119307482e975c1a7b28576c3ed4"} Dec 01 07:02:06 crc kubenswrapper[4632]: I1201 07:02:06.850562 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-fx2pb" podStartSLOduration=2.274463656 podStartE2EDuration="2.850540405s" podCreationTimestamp="2025-12-01 07:02:04 +0000 UTC" firstStartedPulling="2025-12-01 07:02:05.675069998 +0000 UTC m=+1135.240082971" lastFinishedPulling="2025-12-01 07:02:06.251146747 +0000 UTC m=+1135.816159720" observedRunningTime="2025-12-01 07:02:06.844038249 +0000 UTC m=+1136.409051222" watchObservedRunningTime="2025-12-01 07:02:06.850540405 +0000 UTC m=+1136.415553379" Dec 01 07:02:08 crc kubenswrapper[4632]: I1201 07:02:08.846633 4632 generic.go:334] "Generic (PLEG): container finished" podID="29eb2b86-6594-44a0-a146-073da23a9341" containerID="ebf2367c6c6ee0de80cdf5586bcec718a4d7119307482e975c1a7b28576c3ed4" exitCode=0 Dec 01 07:02:08 crc kubenswrapper[4632]: I1201 07:02:08.846732 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-fx2pb" 
event={"ID":"29eb2b86-6594-44a0-a146-073da23a9341","Type":"ContainerDied","Data":"ebf2367c6c6ee0de80cdf5586bcec718a4d7119307482e975c1a7b28576c3ed4"} Dec 01 07:02:10 crc kubenswrapper[4632]: I1201 07:02:10.196515 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-fx2pb" Dec 01 07:02:10 crc kubenswrapper[4632]: I1201 07:02:10.204086 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kwmmn\" (UniqueName: \"kubernetes.io/projected/29eb2b86-6594-44a0-a146-073da23a9341-kube-api-access-kwmmn\") pod \"29eb2b86-6594-44a0-a146-073da23a9341\" (UID: \"29eb2b86-6594-44a0-a146-073da23a9341\") " Dec 01 07:02:10 crc kubenswrapper[4632]: I1201 07:02:10.204455 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/29eb2b86-6594-44a0-a146-073da23a9341-inventory\") pod \"29eb2b86-6594-44a0-a146-073da23a9341\" (UID: \"29eb2b86-6594-44a0-a146-073da23a9341\") " Dec 01 07:02:10 crc kubenswrapper[4632]: I1201 07:02:10.204602 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/29eb2b86-6594-44a0-a146-073da23a9341-ssh-key\") pod \"29eb2b86-6594-44a0-a146-073da23a9341\" (UID: \"29eb2b86-6594-44a0-a146-073da23a9341\") " Dec 01 07:02:10 crc kubenswrapper[4632]: I1201 07:02:10.210867 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29eb2b86-6594-44a0-a146-073da23a9341-kube-api-access-kwmmn" (OuterVolumeSpecName: "kube-api-access-kwmmn") pod "29eb2b86-6594-44a0-a146-073da23a9341" (UID: "29eb2b86-6594-44a0-a146-073da23a9341"). InnerVolumeSpecName "kube-api-access-kwmmn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:02:10 crc kubenswrapper[4632]: E1201 07:02:10.230305 4632 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/29eb2b86-6594-44a0-a146-073da23a9341-ssh-key podName:29eb2b86-6594-44a0-a146-073da23a9341 nodeName:}" failed. No retries permitted until 2025-12-01 07:02:10.730273192 +0000 UTC m=+1140.295286165 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "ssh-key" (UniqueName: "kubernetes.io/secret/29eb2b86-6594-44a0-a146-073da23a9341-ssh-key") pod "29eb2b86-6594-44a0-a146-073da23a9341" (UID: "29eb2b86-6594-44a0-a146-073da23a9341") : error deleting /var/lib/kubelet/pods/29eb2b86-6594-44a0-a146-073da23a9341/volume-subpaths: remove /var/lib/kubelet/pods/29eb2b86-6594-44a0-a146-073da23a9341/volume-subpaths: no such file or directory Dec 01 07:02:10 crc kubenswrapper[4632]: I1201 07:02:10.233309 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29eb2b86-6594-44a0-a146-073da23a9341-inventory" (OuterVolumeSpecName: "inventory") pod "29eb2b86-6594-44a0-a146-073da23a9341" (UID: "29eb2b86-6594-44a0-a146-073da23a9341"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:02:10 crc kubenswrapper[4632]: I1201 07:02:10.307699 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kwmmn\" (UniqueName: \"kubernetes.io/projected/29eb2b86-6594-44a0-a146-073da23a9341-kube-api-access-kwmmn\") on node \"crc\" DevicePath \"\"" Dec 01 07:02:10 crc kubenswrapper[4632]: I1201 07:02:10.307732 4632 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/29eb2b86-6594-44a0-a146-073da23a9341-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 07:02:10 crc kubenswrapper[4632]: I1201 07:02:10.816217 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/29eb2b86-6594-44a0-a146-073da23a9341-ssh-key\") pod \"29eb2b86-6594-44a0-a146-073da23a9341\" (UID: \"29eb2b86-6594-44a0-a146-073da23a9341\") " Dec 01 07:02:10 crc kubenswrapper[4632]: I1201 07:02:10.819660 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29eb2b86-6594-44a0-a146-073da23a9341-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "29eb2b86-6594-44a0-a146-073da23a9341" (UID: "29eb2b86-6594-44a0-a146-073da23a9341"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:02:10 crc kubenswrapper[4632]: I1201 07:02:10.867276 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-fx2pb" event={"ID":"29eb2b86-6594-44a0-a146-073da23a9341","Type":"ContainerDied","Data":"1218002a9ec9e1b3f9519baaa21e7561a3bd5a256c126b2d3aacb354c0970363"} Dec 01 07:02:10 crc kubenswrapper[4632]: I1201 07:02:10.867339 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1218002a9ec9e1b3f9519baaa21e7561a3bd5a256c126b2d3aacb354c0970363" Dec 01 07:02:10 crc kubenswrapper[4632]: I1201 07:02:10.867370 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-fx2pb" Dec 01 07:02:10 crc kubenswrapper[4632]: I1201 07:02:10.919556 4632 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/29eb2b86-6594-44a0-a146-073da23a9341-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 07:02:11 crc kubenswrapper[4632]: I1201 07:02:11.030370 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w"] Dec 01 07:02:11 crc kubenswrapper[4632]: E1201 07:02:11.031159 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29eb2b86-6594-44a0-a146-073da23a9341" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 01 07:02:11 crc kubenswrapper[4632]: I1201 07:02:11.031182 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="29eb2b86-6594-44a0-a146-073da23a9341" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 01 07:02:11 crc kubenswrapper[4632]: I1201 07:02:11.031463 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="29eb2b86-6594-44a0-a146-073da23a9341" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 01 07:02:11 crc kubenswrapper[4632]: I1201 07:02:11.032234 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w" Dec 01 07:02:11 crc kubenswrapper[4632]: I1201 07:02:11.035154 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 07:02:11 crc kubenswrapper[4632]: I1201 07:02:11.035195 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 07:02:11 crc kubenswrapper[4632]: I1201 07:02:11.035214 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 07:02:11 crc kubenswrapper[4632]: I1201 07:02:11.036649 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l6tzc" Dec 01 07:02:11 crc kubenswrapper[4632]: I1201 07:02:11.039322 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w"] Dec 01 07:02:11 crc kubenswrapper[4632]: I1201 07:02:11.123331 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s78hc\" (UniqueName: \"kubernetes.io/projected/0fa69e48-53f9-4bb5-9e11-a9afde0d8912-kube-api-access-s78hc\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w\" (UID: \"0fa69e48-53f9-4bb5-9e11-a9afde0d8912\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w" Dec 01 07:02:11 crc kubenswrapper[4632]: I1201 07:02:11.123627 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0fa69e48-53f9-4bb5-9e11-a9afde0d8912-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w\" (UID: \"0fa69e48-53f9-4bb5-9e11-a9afde0d8912\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w" Dec 01 07:02:11 crc kubenswrapper[4632]: I1201 07:02:11.123798 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fa69e48-53f9-4bb5-9e11-a9afde0d8912-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w\" (UID: \"0fa69e48-53f9-4bb5-9e11-a9afde0d8912\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w" Dec 01 07:02:11 crc kubenswrapper[4632]: I1201 07:02:11.124099 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0fa69e48-53f9-4bb5-9e11-a9afde0d8912-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w\" (UID: \"0fa69e48-53f9-4bb5-9e11-a9afde0d8912\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w" Dec 01 07:02:11 crc kubenswrapper[4632]: I1201 07:02:11.226785 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0fa69e48-53f9-4bb5-9e11-a9afde0d8912-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w\" (UID: \"0fa69e48-53f9-4bb5-9e11-a9afde0d8912\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w" Dec 01 07:02:11 crc kubenswrapper[4632]: I1201 07:02:11.227876 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fa69e48-53f9-4bb5-9e11-a9afde0d8912-bootstrap-combined-ca-bundle\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w\" (UID: \"0fa69e48-53f9-4bb5-9e11-a9afde0d8912\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w" Dec 01 07:02:11 crc kubenswrapper[4632]: I1201 07:02:11.228197 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0fa69e48-53f9-4bb5-9e11-a9afde0d8912-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w\" (UID: \"0fa69e48-53f9-4bb5-9e11-a9afde0d8912\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w" Dec 01 07:02:11 crc kubenswrapper[4632]: I1201 07:02:11.228269 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s78hc\" (UniqueName: \"kubernetes.io/projected/0fa69e48-53f9-4bb5-9e11-a9afde0d8912-kube-api-access-s78hc\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w\" (UID: \"0fa69e48-53f9-4bb5-9e11-a9afde0d8912\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w" Dec 01 07:02:11 crc kubenswrapper[4632]: I1201 07:02:11.231343 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0fa69e48-53f9-4bb5-9e11-a9afde0d8912-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w\" (UID: \"0fa69e48-53f9-4bb5-9e11-a9afde0d8912\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w" Dec 01 07:02:11 crc kubenswrapper[4632]: I1201 07:02:11.232874 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fa69e48-53f9-4bb5-9e11-a9afde0d8912-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w\" (UID: \"0fa69e48-53f9-4bb5-9e11-a9afde0d8912\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w" Dec 01 07:02:11 crc kubenswrapper[4632]: I1201 07:02:11.233597 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0fa69e48-53f9-4bb5-9e11-a9afde0d8912-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w\" (UID: \"0fa69e48-53f9-4bb5-9e11-a9afde0d8912\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w" Dec 01 07:02:11 crc kubenswrapper[4632]: I1201 07:02:11.246098 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s78hc\" (UniqueName: \"kubernetes.io/projected/0fa69e48-53f9-4bb5-9e11-a9afde0d8912-kube-api-access-s78hc\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w\" (UID: \"0fa69e48-53f9-4bb5-9e11-a9afde0d8912\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w" Dec 01 07:02:11 crc kubenswrapper[4632]: I1201 07:02:11.353634 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w" Dec 01 07:02:11 crc kubenswrapper[4632]: I1201 07:02:11.856309 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w"] Dec 01 07:02:11 crc kubenswrapper[4632]: W1201 07:02:11.856984 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0fa69e48_53f9_4bb5_9e11_a9afde0d8912.slice/crio-f4eda1d1270b8616d6dd62c8e97cfae90af5aa6ae601c565768a00142308587c WatchSource:0}: Error finding container f4eda1d1270b8616d6dd62c8e97cfae90af5aa6ae601c565768a00142308587c: Status 404 returned error can't find the container with id f4eda1d1270b8616d6dd62c8e97cfae90af5aa6ae601c565768a00142308587c Dec 01 07:02:11 crc kubenswrapper[4632]: I1201 07:02:11.875422 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w" event={"ID":"0fa69e48-53f9-4bb5-9e11-a9afde0d8912","Type":"ContainerStarted","Data":"f4eda1d1270b8616d6dd62c8e97cfae90af5aa6ae601c565768a00142308587c"} Dec 01 07:02:12 crc kubenswrapper[4632]: I1201 07:02:12.890423 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w" event={"ID":"0fa69e48-53f9-4bb5-9e11-a9afde0d8912","Type":"ContainerStarted","Data":"c8d8fd6bc89e13a64ceee7d1e87778635bdd25808425a45b372dc103bede3216"} Dec 01 07:02:12 crc kubenswrapper[4632]: I1201 07:02:12.918681 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w" podStartSLOduration=1.178146716 podStartE2EDuration="1.918657457s" podCreationTimestamp="2025-12-01 07:02:11 +0000 UTC" firstStartedPulling="2025-12-01 07:02:11.859974141 +0000 UTC m=+1141.424987104" lastFinishedPulling="2025-12-01 07:02:12.600484872 +0000 UTC m=+1142.165497845" observedRunningTime="2025-12-01 07:02:12.902734817 +0000 UTC m=+1142.467747791" watchObservedRunningTime="2025-12-01 07:02:12.918657457 +0000 UTC m=+1142.483670430" Dec 01 07:02:19 crc kubenswrapper[4632]: I1201 07:02:19.498033 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:02:19 crc kubenswrapper[4632]: I1201 07:02:19.499714 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:02:19 crc kubenswrapper[4632]: I1201 07:02:19.499847 4632 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" Dec 01 07:02:19 crc kubenswrapper[4632]: I1201 07:02:19.500864 4632 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"512da22c748ed508b59e03438f20cc025fae801c143de4f88251a1676e9c9863"} pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 07:02:19 crc 
kubenswrapper[4632]: I1201 07:02:19.500984 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" containerID="cri-o://512da22c748ed508b59e03438f20cc025fae801c143de4f88251a1676e9c9863" gracePeriod=600 Dec 01 07:02:19 crc kubenswrapper[4632]: I1201 07:02:19.969017 4632 generic.go:334] "Generic (PLEG): container finished" podID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerID="512da22c748ed508b59e03438f20cc025fae801c143de4f88251a1676e9c9863" exitCode=0 Dec 01 07:02:19 crc kubenswrapper[4632]: I1201 07:02:19.969255 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerDied","Data":"512da22c748ed508b59e03438f20cc025fae801c143de4f88251a1676e9c9863"} Dec 01 07:02:19 crc kubenswrapper[4632]: I1201 07:02:19.969295 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerStarted","Data":"11e1fa55914375027dc630d97c2d09c26eb05a0a7d516ea6a156b27687f1b2bb"} Dec 01 07:02:19 crc kubenswrapper[4632]: I1201 07:02:19.969316 4632 scope.go:117] "RemoveContainer" containerID="1813c53cd60de5bffa918f4bfd6831c16142699025e22227df4ba9598c98e491" Dec 01 07:03:15 crc kubenswrapper[4632]: I1201 07:03:15.218993 4632 scope.go:117] "RemoveContainer" containerID="bff8a43eed106d70912c61bd4195a94ee6f21d8c63aa0225faf8a293e87ba298" Dec 01 07:04:15 crc kubenswrapper[4632]: I1201 07:04:15.284440 4632 scope.go:117] "RemoveContainer" containerID="7187ccc6af03ccdc12b795755e6738169c6b5972ead85afb7a24f5dc83d66285" Dec 01 07:04:15 crc kubenswrapper[4632]: I1201 07:04:15.312112 4632 scope.go:117] "RemoveContainer" containerID="a8228ea7fd5bee8cdc77beba5314e95f8c03d1ba22d248e62b16d61b653888c4" Dec 01 07:04:19 crc kubenswrapper[4632]: I1201 07:04:19.497531 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:04:19 crc kubenswrapper[4632]: I1201 07:04:19.498146 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:04:49 crc kubenswrapper[4632]: I1201 07:04:49.498023 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:04:49 crc kubenswrapper[4632]: I1201 07:04:49.498711 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:05:10 crc kubenswrapper[4632]: 
I1201 07:05:10.561297 4632 generic.go:334] "Generic (PLEG): container finished" podID="0fa69e48-53f9-4bb5-9e11-a9afde0d8912" containerID="c8d8fd6bc89e13a64ceee7d1e87778635bdd25808425a45b372dc103bede3216" exitCode=0 Dec 01 07:05:10 crc kubenswrapper[4632]: I1201 07:05:10.561338 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w" event={"ID":"0fa69e48-53f9-4bb5-9e11-a9afde0d8912","Type":"ContainerDied","Data":"c8d8fd6bc89e13a64ceee7d1e87778635bdd25808425a45b372dc103bede3216"} Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.000082 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.149681 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0fa69e48-53f9-4bb5-9e11-a9afde0d8912-ssh-key\") pod \"0fa69e48-53f9-4bb5-9e11-a9afde0d8912\" (UID: \"0fa69e48-53f9-4bb5-9e11-a9afde0d8912\") " Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.149794 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s78hc\" (UniqueName: \"kubernetes.io/projected/0fa69e48-53f9-4bb5-9e11-a9afde0d8912-kube-api-access-s78hc\") pod \"0fa69e48-53f9-4bb5-9e11-a9afde0d8912\" (UID: \"0fa69e48-53f9-4bb5-9e11-a9afde0d8912\") " Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.149824 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0fa69e48-53f9-4bb5-9e11-a9afde0d8912-inventory\") pod \"0fa69e48-53f9-4bb5-9e11-a9afde0d8912\" (UID: \"0fa69e48-53f9-4bb5-9e11-a9afde0d8912\") " Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.149967 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fa69e48-53f9-4bb5-9e11-a9afde0d8912-bootstrap-combined-ca-bundle\") pod \"0fa69e48-53f9-4bb5-9e11-a9afde0d8912\" (UID: \"0fa69e48-53f9-4bb5-9e11-a9afde0d8912\") " Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.155528 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fa69e48-53f9-4bb5-9e11-a9afde0d8912-kube-api-access-s78hc" (OuterVolumeSpecName: "kube-api-access-s78hc") pod "0fa69e48-53f9-4bb5-9e11-a9afde0d8912" (UID: "0fa69e48-53f9-4bb5-9e11-a9afde0d8912"). InnerVolumeSpecName "kube-api-access-s78hc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.163717 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fa69e48-53f9-4bb5-9e11-a9afde0d8912-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "0fa69e48-53f9-4bb5-9e11-a9afde0d8912" (UID: "0fa69e48-53f9-4bb5-9e11-a9afde0d8912"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.173668 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fa69e48-53f9-4bb5-9e11-a9afde0d8912-inventory" (OuterVolumeSpecName: "inventory") pod "0fa69e48-53f9-4bb5-9e11-a9afde0d8912" (UID: "0fa69e48-53f9-4bb5-9e11-a9afde0d8912"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.174756 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fa69e48-53f9-4bb5-9e11-a9afde0d8912-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0fa69e48-53f9-4bb5-9e11-a9afde0d8912" (UID: "0fa69e48-53f9-4bb5-9e11-a9afde0d8912"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.252333 4632 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fa69e48-53f9-4bb5-9e11-a9afde0d8912-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.252381 4632 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0fa69e48-53f9-4bb5-9e11-a9afde0d8912-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.252395 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s78hc\" (UniqueName: \"kubernetes.io/projected/0fa69e48-53f9-4bb5-9e11-a9afde0d8912-kube-api-access-s78hc\") on node \"crc\" DevicePath \"\"" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.252406 4632 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0fa69e48-53f9-4bb5-9e11-a9afde0d8912-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.585852 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w" event={"ID":"0fa69e48-53f9-4bb5-9e11-a9afde0d8912","Type":"ContainerDied","Data":"f4eda1d1270b8616d6dd62c8e97cfae90af5aa6ae601c565768a00142308587c"} Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.585913 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f4eda1d1270b8616d6dd62c8e97cfae90af5aa6ae601c565768a00142308587c" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.586018 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.650804 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-l89fl"] Dec 01 07:05:12 crc kubenswrapper[4632]: E1201 07:05:12.651327 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fa69e48-53f9-4bb5-9e11-a9afde0d8912" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.651348 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fa69e48-53f9-4bb5-9e11-a9afde0d8912" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.651654 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fa69e48-53f9-4bb5-9e11-a9afde0d8912" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.652484 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-l89fl" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.654392 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l6tzc" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.654579 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.655011 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.658131 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.659524 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6f79d230-5b05-468d-bf19-bb6a792c6b5d-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-l89fl\" (UID: \"6f79d230-5b05-468d-bf19-bb6a792c6b5d\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-l89fl" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.659589 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2m5c2\" (UniqueName: \"kubernetes.io/projected/6f79d230-5b05-468d-bf19-bb6a792c6b5d-kube-api-access-2m5c2\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-l89fl\" (UID: \"6f79d230-5b05-468d-bf19-bb6a792c6b5d\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-l89fl" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.659936 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6f79d230-5b05-468d-bf19-bb6a792c6b5d-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-l89fl\" (UID: \"6f79d230-5b05-468d-bf19-bb6a792c6b5d\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-l89fl" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.661917 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-l89fl"] Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.761172 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6f79d230-5b05-468d-bf19-bb6a792c6b5d-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-l89fl\" (UID: \"6f79d230-5b05-468d-bf19-bb6a792c6b5d\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-l89fl" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.761650 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6f79d230-5b05-468d-bf19-bb6a792c6b5d-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-l89fl\" (UID: \"6f79d230-5b05-468d-bf19-bb6a792c6b5d\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-l89fl" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.762095 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2m5c2\" (UniqueName: \"kubernetes.io/projected/6f79d230-5b05-468d-bf19-bb6a792c6b5d-kube-api-access-2m5c2\") pod 
\"download-cache-edpm-deployment-openstack-edpm-ipam-l89fl\" (UID: \"6f79d230-5b05-468d-bf19-bb6a792c6b5d\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-l89fl" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.765401 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6f79d230-5b05-468d-bf19-bb6a792c6b5d-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-l89fl\" (UID: \"6f79d230-5b05-468d-bf19-bb6a792c6b5d\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-l89fl" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.766086 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6f79d230-5b05-468d-bf19-bb6a792c6b5d-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-l89fl\" (UID: \"6f79d230-5b05-468d-bf19-bb6a792c6b5d\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-l89fl" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.776507 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2m5c2\" (UniqueName: \"kubernetes.io/projected/6f79d230-5b05-468d-bf19-bb6a792c6b5d-kube-api-access-2m5c2\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-l89fl\" (UID: \"6f79d230-5b05-468d-bf19-bb6a792c6b5d\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-l89fl" Dec 01 07:05:12 crc kubenswrapper[4632]: I1201 07:05:12.974097 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-l89fl" Dec 01 07:05:13 crc kubenswrapper[4632]: I1201 07:05:13.437622 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-l89fl"] Dec 01 07:05:13 crc kubenswrapper[4632]: I1201 07:05:13.595946 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-l89fl" event={"ID":"6f79d230-5b05-468d-bf19-bb6a792c6b5d","Type":"ContainerStarted","Data":"d99a5add34f69d6f947cb6c3ef7a2d5098859678e89658256099ace641cad552"} Dec 01 07:05:14 crc kubenswrapper[4632]: I1201 07:05:14.604880 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-l89fl" event={"ID":"6f79d230-5b05-468d-bf19-bb6a792c6b5d","Type":"ContainerStarted","Data":"e87a488571985b8d2df30c897695274218ffa1d0c92c42df549255ad50a7940f"} Dec 01 07:05:14 crc kubenswrapper[4632]: I1201 07:05:14.627739 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-l89fl" podStartSLOduration=2.087863626 podStartE2EDuration="2.627720964s" podCreationTimestamp="2025-12-01 07:05:12 +0000 UTC" firstStartedPulling="2025-12-01 07:05:13.441892791 +0000 UTC m=+1323.006905763" lastFinishedPulling="2025-12-01 07:05:13.981750127 +0000 UTC m=+1323.546763101" observedRunningTime="2025-12-01 07:05:14.620119534 +0000 UTC m=+1324.185132507" watchObservedRunningTime="2025-12-01 07:05:14.627720964 +0000 UTC m=+1324.192733937" Dec 01 07:05:19 crc kubenswrapper[4632]: I1201 07:05:19.497769 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection 
refused" start-of-body= Dec 01 07:05:19 crc kubenswrapper[4632]: I1201 07:05:19.498482 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:05:19 crc kubenswrapper[4632]: I1201 07:05:19.498540 4632 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" Dec 01 07:05:19 crc kubenswrapper[4632]: I1201 07:05:19.499441 4632 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"11e1fa55914375027dc630d97c2d09c26eb05a0a7d516ea6a156b27687f1b2bb"} pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 07:05:19 crc kubenswrapper[4632]: I1201 07:05:19.499500 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" containerID="cri-o://11e1fa55914375027dc630d97c2d09c26eb05a0a7d516ea6a156b27687f1b2bb" gracePeriod=600 Dec 01 07:05:19 crc kubenswrapper[4632]: I1201 07:05:19.663027 4632 generic.go:334] "Generic (PLEG): container finished" podID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerID="11e1fa55914375027dc630d97c2d09c26eb05a0a7d516ea6a156b27687f1b2bb" exitCode=0 Dec 01 07:05:19 crc kubenswrapper[4632]: I1201 07:05:19.663077 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerDied","Data":"11e1fa55914375027dc630d97c2d09c26eb05a0a7d516ea6a156b27687f1b2bb"} Dec 01 07:05:19 crc kubenswrapper[4632]: I1201 07:05:19.663120 4632 scope.go:117] "RemoveContainer" containerID="512da22c748ed508b59e03438f20cc025fae801c143de4f88251a1676e9c9863" Dec 01 07:05:20 crc kubenswrapper[4632]: I1201 07:05:20.676396 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerStarted","Data":"b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d"} Dec 01 07:06:15 crc kubenswrapper[4632]: I1201 07:06:15.411336 4632 scope.go:117] "RemoveContainer" containerID="4d64bc01d2f7f55e9260d521e19816301f8cbe9876921532655775146f8760ca" Dec 01 07:06:15 crc kubenswrapper[4632]: I1201 07:06:15.459586 4632 scope.go:117] "RemoveContainer" containerID="bfd46054c67efe8a4f2837fd9ab612efc70a31f75726d2c418691015efef8b58" Dec 01 07:06:51 crc kubenswrapper[4632]: I1201 07:06:51.034045 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-lwxrf"] Dec 01 07:06:51 crc kubenswrapper[4632]: I1201 07:06:51.042474 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-81a0-account-create-update-vb5bg"] Dec 01 07:06:51 crc kubenswrapper[4632]: I1201 07:06:51.049007 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-lwxrf"] Dec 01 07:06:51 crc kubenswrapper[4632]: I1201 07:06:51.054323 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/placement-81a0-account-create-update-vb5bg"] Dec 01 07:06:52 crc kubenswrapper[4632]: I1201 07:06:52.023011 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-r4zxk"] Dec 01 07:06:52 crc kubenswrapper[4632]: I1201 07:06:52.030962 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-b211-account-create-update-sj67v"] Dec 01 07:06:52 crc kubenswrapper[4632]: I1201 07:06:52.037252 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-b211-account-create-update-sj67v"] Dec 01 07:06:52 crc kubenswrapper[4632]: I1201 07:06:52.042664 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-r4zxk"] Dec 01 07:06:52 crc kubenswrapper[4632]: I1201 07:06:52.759701 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="072f1a7f-3b2a-421c-a000-1bb398725d35" path="/var/lib/kubelet/pods/072f1a7f-3b2a-421c-a000-1bb398725d35/volumes" Dec 01 07:06:52 crc kubenswrapper[4632]: I1201 07:06:52.760264 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14a44dd2-610d-4a19-8301-08aa47df8192" path="/var/lib/kubelet/pods/14a44dd2-610d-4a19-8301-08aa47df8192/volumes" Dec 01 07:06:52 crc kubenswrapper[4632]: I1201 07:06:52.760835 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27e3e955-071a-41fe-a6b1-bb6e6bbd0426" path="/var/lib/kubelet/pods/27e3e955-071a-41fe-a6b1-bb6e6bbd0426/volumes" Dec 01 07:06:52 crc kubenswrapper[4632]: I1201 07:06:52.761374 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b51f3803-021a-45cc-a967-4ad902df39f2" path="/var/lib/kubelet/pods/b51f3803-021a-45cc-a967-4ad902df39f2/volumes" Dec 01 07:06:56 crc kubenswrapper[4632]: I1201 07:06:56.025315 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-8p84f"] Dec 01 07:06:56 crc kubenswrapper[4632]: I1201 07:06:56.033902 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-85a5-account-create-update-kglxb"] Dec 01 07:06:56 crc kubenswrapper[4632]: I1201 07:06:56.041176 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-8p84f"] Dec 01 07:06:56 crc kubenswrapper[4632]: I1201 07:06:56.046817 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-85a5-account-create-update-kglxb"] Dec 01 07:06:56 crc kubenswrapper[4632]: I1201 07:06:56.759300 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32c84236-bf94-47e0-afab-4b0ce0c0bf36" path="/var/lib/kubelet/pods/32c84236-bf94-47e0-afab-4b0ce0c0bf36/volumes" Dec 01 07:06:56 crc kubenswrapper[4632]: I1201 07:06:56.759891 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5d5f398-de6c-4cc8-8f29-44ea946e025c" path="/var/lib/kubelet/pods/b5d5f398-de6c-4cc8-8f29-44ea946e025c/volumes" Dec 01 07:07:02 crc kubenswrapper[4632]: I1201 07:07:02.584913 4632 generic.go:334] "Generic (PLEG): container finished" podID="6f79d230-5b05-468d-bf19-bb6a792c6b5d" containerID="e87a488571985b8d2df30c897695274218ffa1d0c92c42df549255ad50a7940f" exitCode=0 Dec 01 07:07:02 crc kubenswrapper[4632]: I1201 07:07:02.584985 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-l89fl" event={"ID":"6f79d230-5b05-468d-bf19-bb6a792c6b5d","Type":"ContainerDied","Data":"e87a488571985b8d2df30c897695274218ffa1d0c92c42df549255ad50a7940f"} Dec 01 07:07:03 crc kubenswrapper[4632]: I1201 
07:07:03.917917 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-l89fl" Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.036873 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2m5c2\" (UniqueName: \"kubernetes.io/projected/6f79d230-5b05-468d-bf19-bb6a792c6b5d-kube-api-access-2m5c2\") pod \"6f79d230-5b05-468d-bf19-bb6a792c6b5d\" (UID: \"6f79d230-5b05-468d-bf19-bb6a792c6b5d\") " Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.037004 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6f79d230-5b05-468d-bf19-bb6a792c6b5d-inventory\") pod \"6f79d230-5b05-468d-bf19-bb6a792c6b5d\" (UID: \"6f79d230-5b05-468d-bf19-bb6a792c6b5d\") " Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.037033 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6f79d230-5b05-468d-bf19-bb6a792c6b5d-ssh-key\") pod \"6f79d230-5b05-468d-bf19-bb6a792c6b5d\" (UID: \"6f79d230-5b05-468d-bf19-bb6a792c6b5d\") " Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.043401 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f79d230-5b05-468d-bf19-bb6a792c6b5d-kube-api-access-2m5c2" (OuterVolumeSpecName: "kube-api-access-2m5c2") pod "6f79d230-5b05-468d-bf19-bb6a792c6b5d" (UID: "6f79d230-5b05-468d-bf19-bb6a792c6b5d"). InnerVolumeSpecName "kube-api-access-2m5c2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.060836 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f79d230-5b05-468d-bf19-bb6a792c6b5d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6f79d230-5b05-468d-bf19-bb6a792c6b5d" (UID: "6f79d230-5b05-468d-bf19-bb6a792c6b5d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.061435 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f79d230-5b05-468d-bf19-bb6a792c6b5d-inventory" (OuterVolumeSpecName: "inventory") pod "6f79d230-5b05-468d-bf19-bb6a792c6b5d" (UID: "6f79d230-5b05-468d-bf19-bb6a792c6b5d"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.138800 4632 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6f79d230-5b05-468d-bf19-bb6a792c6b5d-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.138831 4632 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6f79d230-5b05-468d-bf19-bb6a792c6b5d-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.138840 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2m5c2\" (UniqueName: \"kubernetes.io/projected/6f79d230-5b05-468d-bf19-bb6a792c6b5d-kube-api-access-2m5c2\") on node \"crc\" DevicePath \"\"" Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.605461 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-l89fl" event={"ID":"6f79d230-5b05-468d-bf19-bb6a792c6b5d","Type":"ContainerDied","Data":"d99a5add34f69d6f947cb6c3ef7a2d5098859678e89658256099ace641cad552"} Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.605515 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d99a5add34f69d6f947cb6c3ef7a2d5098859678e89658256099ace641cad552" Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.605525 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-l89fl" Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.673277 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw"] Dec 01 07:07:04 crc kubenswrapper[4632]: E1201 07:07:04.673900 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f79d230-5b05-468d-bf19-bb6a792c6b5d" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.673965 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f79d230-5b05-468d-bf19-bb6a792c6b5d" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.674207 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f79d230-5b05-468d-bf19-bb6a792c6b5d" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.674939 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw" Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.676262 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.676547 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.676746 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.681560 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l6tzc" Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.681793 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw"] Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.853122 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7wgg9\" (UniqueName: \"kubernetes.io/projected/3cb46d14-bcaa-4c50-99c0-5d6693557f5d-kube-api-access-7wgg9\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw\" (UID: \"3cb46d14-bcaa-4c50-99c0-5d6693557f5d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw" Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.853768 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3cb46d14-bcaa-4c50-99c0-5d6693557f5d-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw\" (UID: \"3cb46d14-bcaa-4c50-99c0-5d6693557f5d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw" Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.853851 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3cb46d14-bcaa-4c50-99c0-5d6693557f5d-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw\" (UID: \"3cb46d14-bcaa-4c50-99c0-5d6693557f5d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw" Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.956830 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3cb46d14-bcaa-4c50-99c0-5d6693557f5d-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw\" (UID: \"3cb46d14-bcaa-4c50-99c0-5d6693557f5d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw" Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.956886 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3cb46d14-bcaa-4c50-99c0-5d6693557f5d-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw\" (UID: \"3cb46d14-bcaa-4c50-99c0-5d6693557f5d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw" Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.957014 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7wgg9\" (UniqueName: \"kubernetes.io/projected/3cb46d14-bcaa-4c50-99c0-5d6693557f5d-kube-api-access-7wgg9\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw\" (UID: \"3cb46d14-bcaa-4c50-99c0-5d6693557f5d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw" Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.962656 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3cb46d14-bcaa-4c50-99c0-5d6693557f5d-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw\" (UID: \"3cb46d14-bcaa-4c50-99c0-5d6693557f5d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw" Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.963227 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3cb46d14-bcaa-4c50-99c0-5d6693557f5d-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw\" (UID: \"3cb46d14-bcaa-4c50-99c0-5d6693557f5d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw" Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.971072 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7wgg9\" (UniqueName: \"kubernetes.io/projected/3cb46d14-bcaa-4c50-99c0-5d6693557f5d-kube-api-access-7wgg9\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw\" (UID: \"3cb46d14-bcaa-4c50-99c0-5d6693557f5d\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw" Dec 01 07:07:04 crc kubenswrapper[4632]: I1201 07:07:04.994518 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw" Dec 01 07:07:05 crc kubenswrapper[4632]: I1201 07:07:05.429566 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw"] Dec 01 07:07:05 crc kubenswrapper[4632]: I1201 07:07:05.433254 4632 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 07:07:05 crc kubenswrapper[4632]: I1201 07:07:05.615906 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw" event={"ID":"3cb46d14-bcaa-4c50-99c0-5d6693557f5d","Type":"ContainerStarted","Data":"3675825593ad297a4221ba317a65a768b93d45b2ed13a733ad0068b5cc846bdd"} Dec 01 07:07:06 crc kubenswrapper[4632]: I1201 07:07:06.625650 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw" event={"ID":"3cb46d14-bcaa-4c50-99c0-5d6693557f5d","Type":"ContainerStarted","Data":"bbc7bf9b33c206152b219138e5c27fa57a168fedad2b3186b255073e1c1a7eeb"} Dec 01 07:07:06 crc kubenswrapper[4632]: I1201 07:07:06.646421 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw" podStartSLOduration=2.044795307 podStartE2EDuration="2.646404531s" podCreationTimestamp="2025-12-01 07:07:04 +0000 UTC" firstStartedPulling="2025-12-01 07:07:05.433021371 +0000 UTC m=+1434.998034345" lastFinishedPulling="2025-12-01 07:07:06.034630606 +0000 UTC m=+1435.599643569" observedRunningTime="2025-12-01 07:07:06.638919273 +0000 UTC m=+1436.203932246" watchObservedRunningTime="2025-12-01 07:07:06.646404531 +0000 UTC m=+1436.211417505" Dec 01 07:07:11 crc kubenswrapper[4632]: I1201 07:07:11.071338 4632 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/redhat-marketplace-zwf88"] Dec 01 07:07:11 crc kubenswrapper[4632]: I1201 07:07:11.074670 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zwf88" Dec 01 07:07:11 crc kubenswrapper[4632]: I1201 07:07:11.076412 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwljd\" (UniqueName: \"kubernetes.io/projected/a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b-kube-api-access-fwljd\") pod \"redhat-marketplace-zwf88\" (UID: \"a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b\") " pod="openshift-marketplace/redhat-marketplace-zwf88" Dec 01 07:07:11 crc kubenswrapper[4632]: I1201 07:07:11.076718 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b-catalog-content\") pod \"redhat-marketplace-zwf88\" (UID: \"a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b\") " pod="openshift-marketplace/redhat-marketplace-zwf88" Dec 01 07:07:11 crc kubenswrapper[4632]: I1201 07:07:11.076999 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b-utilities\") pod \"redhat-marketplace-zwf88\" (UID: \"a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b\") " pod="openshift-marketplace/redhat-marketplace-zwf88" Dec 01 07:07:11 crc kubenswrapper[4632]: I1201 07:07:11.084619 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zwf88"] Dec 01 07:07:11 crc kubenswrapper[4632]: I1201 07:07:11.179243 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b-catalog-content\") pod \"redhat-marketplace-zwf88\" (UID: \"a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b\") " pod="openshift-marketplace/redhat-marketplace-zwf88" Dec 01 07:07:11 crc kubenswrapper[4632]: I1201 07:07:11.179421 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b-utilities\") pod \"redhat-marketplace-zwf88\" (UID: \"a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b\") " pod="openshift-marketplace/redhat-marketplace-zwf88" Dec 01 07:07:11 crc kubenswrapper[4632]: I1201 07:07:11.179498 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwljd\" (UniqueName: \"kubernetes.io/projected/a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b-kube-api-access-fwljd\") pod \"redhat-marketplace-zwf88\" (UID: \"a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b\") " pod="openshift-marketplace/redhat-marketplace-zwf88" Dec 01 07:07:11 crc kubenswrapper[4632]: I1201 07:07:11.179783 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b-catalog-content\") pod \"redhat-marketplace-zwf88\" (UID: \"a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b\") " pod="openshift-marketplace/redhat-marketplace-zwf88" Dec 01 07:07:11 crc kubenswrapper[4632]: I1201 07:07:11.180020 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b-utilities\") pod \"redhat-marketplace-zwf88\" (UID: \"a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b\") 
" pod="openshift-marketplace/redhat-marketplace-zwf88" Dec 01 07:07:11 crc kubenswrapper[4632]: I1201 07:07:11.202520 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwljd\" (UniqueName: \"kubernetes.io/projected/a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b-kube-api-access-fwljd\") pod \"redhat-marketplace-zwf88\" (UID: \"a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b\") " pod="openshift-marketplace/redhat-marketplace-zwf88" Dec 01 07:07:11 crc kubenswrapper[4632]: I1201 07:07:11.396085 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zwf88" Dec 01 07:07:11 crc kubenswrapper[4632]: I1201 07:07:11.832459 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zwf88"] Dec 01 07:07:12 crc kubenswrapper[4632]: I1201 07:07:12.688722 4632 generic.go:334] "Generic (PLEG): container finished" podID="a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b" containerID="090f3b73fe0ae3524a65472c374adec60eb8fa02bdfb81d7734699f27e125f84" exitCode=0 Dec 01 07:07:12 crc kubenswrapper[4632]: I1201 07:07:12.689047 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zwf88" event={"ID":"a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b","Type":"ContainerDied","Data":"090f3b73fe0ae3524a65472c374adec60eb8fa02bdfb81d7734699f27e125f84"} Dec 01 07:07:12 crc kubenswrapper[4632]: I1201 07:07:12.689083 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zwf88" event={"ID":"a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b","Type":"ContainerStarted","Data":"00c146b770e7bfe7bdc2d266ba7f5a066e0b2b97e55c26dcd76e7ea7ded4d364"} Dec 01 07:07:13 crc kubenswrapper[4632]: I1201 07:07:13.703544 4632 generic.go:334] "Generic (PLEG): container finished" podID="a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b" containerID="ea7914b8e9bdf5489d91c2f81e50aa2ba79821bd4ef4b97a199216cdab93af21" exitCode=0 Dec 01 07:07:13 crc kubenswrapper[4632]: I1201 07:07:13.703725 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zwf88" event={"ID":"a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b","Type":"ContainerDied","Data":"ea7914b8e9bdf5489d91c2f81e50aa2ba79821bd4ef4b97a199216cdab93af21"} Dec 01 07:07:14 crc kubenswrapper[4632]: I1201 07:07:14.716717 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zwf88" event={"ID":"a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b","Type":"ContainerStarted","Data":"7b5b9195c645d11b65a7d9dbbd9b43d9223ff8ecaa6dd147e9e157e73e9b1afb"} Dec 01 07:07:15 crc kubenswrapper[4632]: I1201 07:07:15.562245 4632 scope.go:117] "RemoveContainer" containerID="6f3f88cf8a1e5bebeea6cb4ae9b816b4fd2d2cd8c044eb290c254aa60c8d2bc0" Dec 01 07:07:15 crc kubenswrapper[4632]: I1201 07:07:15.591757 4632 scope.go:117] "RemoveContainer" containerID="d7293d7e53876ba338027a03fb1f0b9c2de4692e1f777ce2d474e3b2d012d56c" Dec 01 07:07:15 crc kubenswrapper[4632]: I1201 07:07:15.636383 4632 scope.go:117] "RemoveContainer" containerID="9a35cd51125bb19cc432f2c69d657409db7348508467c35cdecdee984a7a9a96" Dec 01 07:07:15 crc kubenswrapper[4632]: I1201 07:07:15.686980 4632 scope.go:117] "RemoveContainer" containerID="fd79fbda2e9a90e8d0536b1bbf30d130c251d03e6f1b9c149ed800f286861e71" Dec 01 07:07:15 crc kubenswrapper[4632]: I1201 07:07:15.727565 4632 scope.go:117] "RemoveContainer" containerID="e9db658058f1ee45e7e1d18a5b67ffc8a4859bfc83bc0dc3ddcc34c33b6c6dd8" Dec 01 07:07:15 crc kubenswrapper[4632]: 
I1201 07:07:15.767057 4632 scope.go:117] "RemoveContainer" containerID="aaf4ac54b73f8f7517868203a2a2fc0249c42c543e12e95aefca6a3c7e9702ed" Dec 01 07:07:19 crc kubenswrapper[4632]: I1201 07:07:19.497598 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:07:19 crc kubenswrapper[4632]: I1201 07:07:19.498330 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:07:21 crc kubenswrapper[4632]: I1201 07:07:21.033900 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zwf88" podStartSLOduration=8.440710353 podStartE2EDuration="10.033870944s" podCreationTimestamp="2025-12-01 07:07:11 +0000 UTC" firstStartedPulling="2025-12-01 07:07:12.692934137 +0000 UTC m=+1442.257947110" lastFinishedPulling="2025-12-01 07:07:14.286094728 +0000 UTC m=+1443.851107701" observedRunningTime="2025-12-01 07:07:14.740091002 +0000 UTC m=+1444.305103975" watchObservedRunningTime="2025-12-01 07:07:21.033870944 +0000 UTC m=+1450.598883917" Dec 01 07:07:21 crc kubenswrapper[4632]: I1201 07:07:21.040099 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-984wj"] Dec 01 07:07:21 crc kubenswrapper[4632]: I1201 07:07:21.048449 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-984wj"] Dec 01 07:07:21 crc kubenswrapper[4632]: I1201 07:07:21.396194 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zwf88" Dec 01 07:07:21 crc kubenswrapper[4632]: I1201 07:07:21.396613 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-zwf88" Dec 01 07:07:21 crc kubenswrapper[4632]: I1201 07:07:21.438894 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zwf88" Dec 01 07:07:21 crc kubenswrapper[4632]: I1201 07:07:21.822087 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zwf88" Dec 01 07:07:21 crc kubenswrapper[4632]: I1201 07:07:21.866551 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zwf88"] Dec 01 07:07:22 crc kubenswrapper[4632]: I1201 07:07:22.760007 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73583357-a8a2-4812-ba66-553cc7713bd4" path="/var/lib/kubelet/pods/73583357-a8a2-4812-ba66-553cc7713bd4/volumes" Dec 01 07:07:23 crc kubenswrapper[4632]: I1201 07:07:23.798398 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-zwf88" podUID="a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b" containerName="registry-server" containerID="cri-o://7b5b9195c645d11b65a7d9dbbd9b43d9223ff8ecaa6dd147e9e157e73e9b1afb" gracePeriod=2 Dec 01 07:07:24 crc kubenswrapper[4632]: I1201 07:07:24.211731 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zwf88" Dec 01 07:07:24 crc kubenswrapper[4632]: I1201 07:07:24.347277 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwljd\" (UniqueName: \"kubernetes.io/projected/a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b-kube-api-access-fwljd\") pod \"a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b\" (UID: \"a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b\") " Dec 01 07:07:24 crc kubenswrapper[4632]: I1201 07:07:24.347430 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b-catalog-content\") pod \"a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b\" (UID: \"a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b\") " Dec 01 07:07:24 crc kubenswrapper[4632]: I1201 07:07:24.347456 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b-utilities\") pod \"a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b\" (UID: \"a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b\") " Dec 01 07:07:24 crc kubenswrapper[4632]: I1201 07:07:24.348319 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b-utilities" (OuterVolumeSpecName: "utilities") pod "a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b" (UID: "a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:07:24 crc kubenswrapper[4632]: I1201 07:07:24.353268 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b-kube-api-access-fwljd" (OuterVolumeSpecName: "kube-api-access-fwljd") pod "a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b" (UID: "a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b"). InnerVolumeSpecName "kube-api-access-fwljd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:07:24 crc kubenswrapper[4632]: I1201 07:07:24.361504 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b" (UID: "a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:07:24 crc kubenswrapper[4632]: I1201 07:07:24.451044 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwljd\" (UniqueName: \"kubernetes.io/projected/a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b-kube-api-access-fwljd\") on node \"crc\" DevicePath \"\"" Dec 01 07:07:24 crc kubenswrapper[4632]: I1201 07:07:24.451082 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:07:24 crc kubenswrapper[4632]: I1201 07:07:24.451097 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:07:24 crc kubenswrapper[4632]: I1201 07:07:24.812322 4632 generic.go:334] "Generic (PLEG): container finished" podID="a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b" containerID="7b5b9195c645d11b65a7d9dbbd9b43d9223ff8ecaa6dd147e9e157e73e9b1afb" exitCode=0 Dec 01 07:07:24 crc kubenswrapper[4632]: I1201 07:07:24.812384 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zwf88" event={"ID":"a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b","Type":"ContainerDied","Data":"7b5b9195c645d11b65a7d9dbbd9b43d9223ff8ecaa6dd147e9e157e73e9b1afb"} Dec 01 07:07:24 crc kubenswrapper[4632]: I1201 07:07:24.812452 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zwf88" event={"ID":"a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b","Type":"ContainerDied","Data":"00c146b770e7bfe7bdc2d266ba7f5a066e0b2b97e55c26dcd76e7ea7ded4d364"} Dec 01 07:07:24 crc kubenswrapper[4632]: I1201 07:07:24.812466 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zwf88" Dec 01 07:07:24 crc kubenswrapper[4632]: I1201 07:07:24.812480 4632 scope.go:117] "RemoveContainer" containerID="7b5b9195c645d11b65a7d9dbbd9b43d9223ff8ecaa6dd147e9e157e73e9b1afb" Dec 01 07:07:24 crc kubenswrapper[4632]: I1201 07:07:24.837894 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zwf88"] Dec 01 07:07:24 crc kubenswrapper[4632]: I1201 07:07:24.839066 4632 scope.go:117] "RemoveContainer" containerID="ea7914b8e9bdf5489d91c2f81e50aa2ba79821bd4ef4b97a199216cdab93af21" Dec 01 07:07:24 crc kubenswrapper[4632]: I1201 07:07:24.845111 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-zwf88"] Dec 01 07:07:24 crc kubenswrapper[4632]: I1201 07:07:24.865432 4632 scope.go:117] "RemoveContainer" containerID="090f3b73fe0ae3524a65472c374adec60eb8fa02bdfb81d7734699f27e125f84" Dec 01 07:07:24 crc kubenswrapper[4632]: I1201 07:07:24.901303 4632 scope.go:117] "RemoveContainer" containerID="7b5b9195c645d11b65a7d9dbbd9b43d9223ff8ecaa6dd147e9e157e73e9b1afb" Dec 01 07:07:24 crc kubenswrapper[4632]: E1201 07:07:24.902147 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b5b9195c645d11b65a7d9dbbd9b43d9223ff8ecaa6dd147e9e157e73e9b1afb\": container with ID starting with 7b5b9195c645d11b65a7d9dbbd9b43d9223ff8ecaa6dd147e9e157e73e9b1afb not found: ID does not exist" containerID="7b5b9195c645d11b65a7d9dbbd9b43d9223ff8ecaa6dd147e9e157e73e9b1afb" Dec 01 07:07:24 crc kubenswrapper[4632]: I1201 07:07:24.902224 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b5b9195c645d11b65a7d9dbbd9b43d9223ff8ecaa6dd147e9e157e73e9b1afb"} err="failed to get container status \"7b5b9195c645d11b65a7d9dbbd9b43d9223ff8ecaa6dd147e9e157e73e9b1afb\": rpc error: code = NotFound desc = could not find container \"7b5b9195c645d11b65a7d9dbbd9b43d9223ff8ecaa6dd147e9e157e73e9b1afb\": container with ID starting with 7b5b9195c645d11b65a7d9dbbd9b43d9223ff8ecaa6dd147e9e157e73e9b1afb not found: ID does not exist" Dec 01 07:07:24 crc kubenswrapper[4632]: I1201 07:07:24.902278 4632 scope.go:117] "RemoveContainer" containerID="ea7914b8e9bdf5489d91c2f81e50aa2ba79821bd4ef4b97a199216cdab93af21" Dec 01 07:07:24 crc kubenswrapper[4632]: E1201 07:07:24.902679 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea7914b8e9bdf5489d91c2f81e50aa2ba79821bd4ef4b97a199216cdab93af21\": container with ID starting with ea7914b8e9bdf5489d91c2f81e50aa2ba79821bd4ef4b97a199216cdab93af21 not found: ID does not exist" containerID="ea7914b8e9bdf5489d91c2f81e50aa2ba79821bd4ef4b97a199216cdab93af21" Dec 01 07:07:24 crc kubenswrapper[4632]: I1201 07:07:24.902727 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea7914b8e9bdf5489d91c2f81e50aa2ba79821bd4ef4b97a199216cdab93af21"} err="failed to get container status \"ea7914b8e9bdf5489d91c2f81e50aa2ba79821bd4ef4b97a199216cdab93af21\": rpc error: code = NotFound desc = could not find container \"ea7914b8e9bdf5489d91c2f81e50aa2ba79821bd4ef4b97a199216cdab93af21\": container with ID starting with ea7914b8e9bdf5489d91c2f81e50aa2ba79821bd4ef4b97a199216cdab93af21 not found: ID does not exist" Dec 01 07:07:24 crc kubenswrapper[4632]: I1201 07:07:24.902762 4632 scope.go:117] "RemoveContainer" 
containerID="090f3b73fe0ae3524a65472c374adec60eb8fa02bdfb81d7734699f27e125f84" Dec 01 07:07:24 crc kubenswrapper[4632]: E1201 07:07:24.902982 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"090f3b73fe0ae3524a65472c374adec60eb8fa02bdfb81d7734699f27e125f84\": container with ID starting with 090f3b73fe0ae3524a65472c374adec60eb8fa02bdfb81d7734699f27e125f84 not found: ID does not exist" containerID="090f3b73fe0ae3524a65472c374adec60eb8fa02bdfb81d7734699f27e125f84" Dec 01 07:07:24 crc kubenswrapper[4632]: I1201 07:07:24.903006 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"090f3b73fe0ae3524a65472c374adec60eb8fa02bdfb81d7734699f27e125f84"} err="failed to get container status \"090f3b73fe0ae3524a65472c374adec60eb8fa02bdfb81d7734699f27e125f84\": rpc error: code = NotFound desc = could not find container \"090f3b73fe0ae3524a65472c374adec60eb8fa02bdfb81d7734699f27e125f84\": container with ID starting with 090f3b73fe0ae3524a65472c374adec60eb8fa02bdfb81d7734699f27e125f84 not found: ID does not exist" Dec 01 07:07:26 crc kubenswrapper[4632]: I1201 07:07:26.759974 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b" path="/var/lib/kubelet/pods/a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b/volumes" Dec 01 07:07:33 crc kubenswrapper[4632]: I1201 07:07:33.034918 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-dr5wg"] Dec 01 07:07:33 crc kubenswrapper[4632]: I1201 07:07:33.041877 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-93e6-account-create-update-k2rwx"] Dec 01 07:07:33 crc kubenswrapper[4632]: I1201 07:07:33.048388 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-b9m9r"] Dec 01 07:07:33 crc kubenswrapper[4632]: I1201 07:07:33.065094 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-a527-account-create-update-26dwb"] Dec 01 07:07:33 crc kubenswrapper[4632]: I1201 07:07:33.071142 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-a828-account-create-update-wt9ph"] Dec 01 07:07:33 crc kubenswrapper[4632]: I1201 07:07:33.079219 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-a527-account-create-update-26dwb"] Dec 01 07:07:33 crc kubenswrapper[4632]: I1201 07:07:33.085397 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-dr5wg"] Dec 01 07:07:33 crc kubenswrapper[4632]: I1201 07:07:33.093121 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-93e6-account-create-update-k2rwx"] Dec 01 07:07:33 crc kubenswrapper[4632]: I1201 07:07:33.100882 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-b9m9r"] Dec 01 07:07:33 crc kubenswrapper[4632]: I1201 07:07:33.107266 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-a828-account-create-update-wt9ph"] Dec 01 07:07:34 crc kubenswrapper[4632]: I1201 07:07:34.026646 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-nsn2w"] Dec 01 07:07:34 crc kubenswrapper[4632]: I1201 07:07:34.035267 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-nsn2w"] Dec 01 07:07:34 crc kubenswrapper[4632]: I1201 07:07:34.759384 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="15e859ff-6075-4487-a933-1e037cfa00d0" path="/var/lib/kubelet/pods/15e859ff-6075-4487-a933-1e037cfa00d0/volumes" Dec 01 07:07:34 crc kubenswrapper[4632]: I1201 07:07:34.760321 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d4743b6-ab1b-4792-a781-4849f8b13e94" path="/var/lib/kubelet/pods/2d4743b6-ab1b-4792-a781-4849f8b13e94/volumes" Dec 01 07:07:34 crc kubenswrapper[4632]: I1201 07:07:34.760866 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c0bc241-1784-4738-9eb9-cba060f1d9d8" path="/var/lib/kubelet/pods/3c0bc241-1784-4738-9eb9-cba060f1d9d8/volumes" Dec 01 07:07:34 crc kubenswrapper[4632]: I1201 07:07:34.761391 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43c55cae-a062-4bf2-8649-fee844127588" path="/var/lib/kubelet/pods/43c55cae-a062-4bf2-8649-fee844127588/volumes" Dec 01 07:07:34 crc kubenswrapper[4632]: I1201 07:07:34.762407 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="826e84c3-9339-4e50-9845-ca3dd1e9fc67" path="/var/lib/kubelet/pods/826e84c3-9339-4e50-9845-ca3dd1e9fc67/volumes" Dec 01 07:07:34 crc kubenswrapper[4632]: I1201 07:07:34.762919 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="faaa590b-70fd-446c-a484-e932b8d2549a" path="/var/lib/kubelet/pods/faaa590b-70fd-446c-a484-e932b8d2549a/volumes" Dec 01 07:07:40 crc kubenswrapper[4632]: I1201 07:07:40.038059 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-pgkg7"] Dec 01 07:07:40 crc kubenswrapper[4632]: I1201 07:07:40.044885 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-pgkg7"] Dec 01 07:07:40 crc kubenswrapper[4632]: I1201 07:07:40.759422 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57240989-7936-48f2-9686-ef72e5dfa1a5" path="/var/lib/kubelet/pods/57240989-7936-48f2-9686-ef72e5dfa1a5/volumes" Dec 01 07:07:49 crc kubenswrapper[4632]: I1201 07:07:49.498057 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:07:49 crc kubenswrapper[4632]: I1201 07:07:49.498681 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:07:54 crc kubenswrapper[4632]: I1201 07:07:54.033667 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-4rvbb"] Dec 01 07:07:54 crc kubenswrapper[4632]: I1201 07:07:54.042374 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-4rvbb"] Dec 01 07:07:54 crc kubenswrapper[4632]: I1201 07:07:54.764770 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57384d37-fe8e-4534-b99f-579737abcab7" path="/var/lib/kubelet/pods/57384d37-fe8e-4534-b99f-579737abcab7/volumes" Dec 01 07:07:57 crc kubenswrapper[4632]: I1201 07:07:57.027044 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-bqchp"] Dec 01 07:07:57 crc kubenswrapper[4632]: I1201 07:07:57.032596 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/placement-db-sync-bqchp"] Dec 01 07:07:58 crc kubenswrapper[4632]: I1201 07:07:58.760145 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17a28608-4b5e-435b-868f-0c9cc98f7c91" path="/var/lib/kubelet/pods/17a28608-4b5e-435b-868f-0c9cc98f7c91/volumes" Dec 01 07:08:07 crc kubenswrapper[4632]: I1201 07:08:07.072672 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-nbn5v"] Dec 01 07:08:07 crc kubenswrapper[4632]: I1201 07:08:07.079834 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-nbn5v"] Dec 01 07:08:08 crc kubenswrapper[4632]: I1201 07:08:08.759006 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64108b12-b957-4e10-be96-e49cab11acdc" path="/var/lib/kubelet/pods/64108b12-b957-4e10-be96-e49cab11acdc/volumes" Dec 01 07:08:11 crc kubenswrapper[4632]: I1201 07:08:11.027978 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-xvgfc"] Dec 01 07:08:11 crc kubenswrapper[4632]: I1201 07:08:11.035820 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-xvgfc"] Dec 01 07:08:12 crc kubenswrapper[4632]: I1201 07:08:12.760264 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="803c9fb6-6650-4865-b7f1-ed485299302f" path="/var/lib/kubelet/pods/803c9fb6-6650-4865-b7f1-ed485299302f/volumes" Dec 01 07:08:13 crc kubenswrapper[4632]: I1201 07:08:13.946808 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-s54pw"] Dec 01 07:08:13 crc kubenswrapper[4632]: E1201 07:08:13.947440 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b" containerName="registry-server" Dec 01 07:08:13 crc kubenswrapper[4632]: I1201 07:08:13.947456 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b" containerName="registry-server" Dec 01 07:08:13 crc kubenswrapper[4632]: E1201 07:08:13.947477 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b" containerName="extract-content" Dec 01 07:08:13 crc kubenswrapper[4632]: I1201 07:08:13.947482 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b" containerName="extract-content" Dec 01 07:08:13 crc kubenswrapper[4632]: E1201 07:08:13.947520 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b" containerName="extract-utilities" Dec 01 07:08:13 crc kubenswrapper[4632]: I1201 07:08:13.947526 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b" containerName="extract-utilities" Dec 01 07:08:13 crc kubenswrapper[4632]: I1201 07:08:13.947744 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6b5bdf6-38ad-4ad0-8790-3137bd5ed92b" containerName="registry-server" Dec 01 07:08:13 crc kubenswrapper[4632]: I1201 07:08:13.949016 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-s54pw" Dec 01 07:08:13 crc kubenswrapper[4632]: I1201 07:08:13.957146 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-s54pw"] Dec 01 07:08:14 crc kubenswrapper[4632]: I1201 07:08:14.105581 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/443b42de-b846-47a2-abfa-4807f4e2310c-catalog-content\") pod \"community-operators-s54pw\" (UID: \"443b42de-b846-47a2-abfa-4807f4e2310c\") " pod="openshift-marketplace/community-operators-s54pw" Dec 01 07:08:14 crc kubenswrapper[4632]: I1201 07:08:14.105891 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/443b42de-b846-47a2-abfa-4807f4e2310c-utilities\") pod \"community-operators-s54pw\" (UID: \"443b42de-b846-47a2-abfa-4807f4e2310c\") " pod="openshift-marketplace/community-operators-s54pw" Dec 01 07:08:14 crc kubenswrapper[4632]: I1201 07:08:14.105978 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gn48s\" (UniqueName: \"kubernetes.io/projected/443b42de-b846-47a2-abfa-4807f4e2310c-kube-api-access-gn48s\") pod \"community-operators-s54pw\" (UID: \"443b42de-b846-47a2-abfa-4807f4e2310c\") " pod="openshift-marketplace/community-operators-s54pw" Dec 01 07:08:14 crc kubenswrapper[4632]: I1201 07:08:14.208113 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gn48s\" (UniqueName: \"kubernetes.io/projected/443b42de-b846-47a2-abfa-4807f4e2310c-kube-api-access-gn48s\") pod \"community-operators-s54pw\" (UID: \"443b42de-b846-47a2-abfa-4807f4e2310c\") " pod="openshift-marketplace/community-operators-s54pw" Dec 01 07:08:14 crc kubenswrapper[4632]: I1201 07:08:14.208212 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/443b42de-b846-47a2-abfa-4807f4e2310c-catalog-content\") pod \"community-operators-s54pw\" (UID: \"443b42de-b846-47a2-abfa-4807f4e2310c\") " pod="openshift-marketplace/community-operators-s54pw" Dec 01 07:08:14 crc kubenswrapper[4632]: I1201 07:08:14.208311 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/443b42de-b846-47a2-abfa-4807f4e2310c-utilities\") pod \"community-operators-s54pw\" (UID: \"443b42de-b846-47a2-abfa-4807f4e2310c\") " pod="openshift-marketplace/community-operators-s54pw" Dec 01 07:08:14 crc kubenswrapper[4632]: I1201 07:08:14.208785 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/443b42de-b846-47a2-abfa-4807f4e2310c-utilities\") pod \"community-operators-s54pw\" (UID: \"443b42de-b846-47a2-abfa-4807f4e2310c\") " pod="openshift-marketplace/community-operators-s54pw" Dec 01 07:08:14 crc kubenswrapper[4632]: I1201 07:08:14.209348 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/443b42de-b846-47a2-abfa-4807f4e2310c-catalog-content\") pod \"community-operators-s54pw\" (UID: \"443b42de-b846-47a2-abfa-4807f4e2310c\") " pod="openshift-marketplace/community-operators-s54pw" Dec 01 07:08:14 crc kubenswrapper[4632]: I1201 07:08:14.229646 4632 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-gn48s\" (UniqueName: \"kubernetes.io/projected/443b42de-b846-47a2-abfa-4807f4e2310c-kube-api-access-gn48s\") pod \"community-operators-s54pw\" (UID: \"443b42de-b846-47a2-abfa-4807f4e2310c\") " pod="openshift-marketplace/community-operators-s54pw" Dec 01 07:08:14 crc kubenswrapper[4632]: I1201 07:08:14.278777 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s54pw" Dec 01 07:08:14 crc kubenswrapper[4632]: I1201 07:08:14.741558 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-s54pw"] Dec 01 07:08:15 crc kubenswrapper[4632]: I1201 07:08:15.272743 4632 generic.go:334] "Generic (PLEG): container finished" podID="443b42de-b846-47a2-abfa-4807f4e2310c" containerID="4cca9846311da6a4d882e0b3413bd59e9256f41e5244d89ec040b0f73df024fc" exitCode=0 Dec 01 07:08:15 crc kubenswrapper[4632]: I1201 07:08:15.272827 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s54pw" event={"ID":"443b42de-b846-47a2-abfa-4807f4e2310c","Type":"ContainerDied","Data":"4cca9846311da6a4d882e0b3413bd59e9256f41e5244d89ec040b0f73df024fc"} Dec 01 07:08:15 crc kubenswrapper[4632]: I1201 07:08:15.272875 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s54pw" event={"ID":"443b42de-b846-47a2-abfa-4807f4e2310c","Type":"ContainerStarted","Data":"3c26cb6a09d7a841fd815c437da6e630b5cbd49d655a3d60f5729a75e401d5b4"} Dec 01 07:08:15 crc kubenswrapper[4632]: I1201 07:08:15.868939 4632 scope.go:117] "RemoveContainer" containerID="89bf2c33db503953d591a56506618d04a7b870c7338ec0e8892c1cd3a1062f94" Dec 01 07:08:15 crc kubenswrapper[4632]: I1201 07:08:15.889725 4632 scope.go:117] "RemoveContainer" containerID="9a1fbceb62690412f3971e3668768c4c4764e31aed7fc2504b7611591da48db9" Dec 01 07:08:15 crc kubenswrapper[4632]: I1201 07:08:15.932030 4632 scope.go:117] "RemoveContainer" containerID="c5fb34b21eac3eab41cc36fd135051a55ecabfa1a4de3bae5ac9d52606b1176f" Dec 01 07:08:15 crc kubenswrapper[4632]: I1201 07:08:15.969111 4632 scope.go:117] "RemoveContainer" containerID="c7ac4a849f34520e3c55784649c1f2efb713a2dd22885c3fa6ab81afe2f36b63" Dec 01 07:08:15 crc kubenswrapper[4632]: I1201 07:08:15.998341 4632 scope.go:117] "RemoveContainer" containerID="87539484663fff4cba89e750f3d9c33f4eda19407f661838a880c975a0ace6d8" Dec 01 07:08:16 crc kubenswrapper[4632]: I1201 07:08:16.026456 4632 scope.go:117] "RemoveContainer" containerID="69d68e096c31bb52b93885c0077c4cbaefa6791d617dee7fd5debdb1f5131dc8" Dec 01 07:08:16 crc kubenswrapper[4632]: I1201 07:08:16.069538 4632 scope.go:117] "RemoveContainer" containerID="8cf22025785f727164843e0e6a640dfb8cd8e0bc6f92f660d17e053cba35c8fb" Dec 01 07:08:16 crc kubenswrapper[4632]: I1201 07:08:16.106235 4632 scope.go:117] "RemoveContainer" containerID="407d4134f2fb4d65cf02f7676c4ea3065d0b4ecb9507bd9a2300329cd709a788" Dec 01 07:08:16 crc kubenswrapper[4632]: I1201 07:08:16.172295 4632 scope.go:117] "RemoveContainer" containerID="ed629d560af6a946907e282a4d8b0a4ef2d9a6dfb6728cd16e6264e2626398e9" Dec 01 07:08:16 crc kubenswrapper[4632]: I1201 07:08:16.188517 4632 scope.go:117] "RemoveContainer" containerID="2da2a7ef1e02681148f3622763972f6fba2b72eaf684467e5d6752c822080119" Dec 01 07:08:16 crc kubenswrapper[4632]: I1201 07:08:16.203952 4632 scope.go:117] "RemoveContainer" containerID="44a1d0b62e06750c4258ee13d7ff0b063a11a7ed141608992782e1f8a77ba7fa" Dec 01 
07:08:16 crc kubenswrapper[4632]: I1201 07:08:16.233022 4632 scope.go:117] "RemoveContainer" containerID="415a7ab3b8973040458a0bc2b77217ebe5a8d21230f0ba79b29cbb2d16521e8c" Dec 01 07:08:16 crc kubenswrapper[4632]: I1201 07:08:16.307317 4632 generic.go:334] "Generic (PLEG): container finished" podID="3cb46d14-bcaa-4c50-99c0-5d6693557f5d" containerID="bbc7bf9b33c206152b219138e5c27fa57a168fedad2b3186b255073e1c1a7eeb" exitCode=0 Dec 01 07:08:16 crc kubenswrapper[4632]: I1201 07:08:16.307383 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw" event={"ID":"3cb46d14-bcaa-4c50-99c0-5d6693557f5d","Type":"ContainerDied","Data":"bbc7bf9b33c206152b219138e5c27fa57a168fedad2b3186b255073e1c1a7eeb"} Dec 01 07:08:17 crc kubenswrapper[4632]: I1201 07:08:17.317322 4632 generic.go:334] "Generic (PLEG): container finished" podID="443b42de-b846-47a2-abfa-4807f4e2310c" containerID="8ea5663c79071332ba8f36bb0d77fb37ac265349deaa44e0f655434720363ac0" exitCode=0 Dec 01 07:08:17 crc kubenswrapper[4632]: I1201 07:08:17.317392 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s54pw" event={"ID":"443b42de-b846-47a2-abfa-4807f4e2310c","Type":"ContainerDied","Data":"8ea5663c79071332ba8f36bb0d77fb37ac265349deaa44e0f655434720363ac0"} Dec 01 07:08:17 crc kubenswrapper[4632]: I1201 07:08:17.670423 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw" Dec 01 07:08:17 crc kubenswrapper[4632]: I1201 07:08:17.704529 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3cb46d14-bcaa-4c50-99c0-5d6693557f5d-ssh-key\") pod \"3cb46d14-bcaa-4c50-99c0-5d6693557f5d\" (UID: \"3cb46d14-bcaa-4c50-99c0-5d6693557f5d\") " Dec 01 07:08:17 crc kubenswrapper[4632]: I1201 07:08:17.704579 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7wgg9\" (UniqueName: \"kubernetes.io/projected/3cb46d14-bcaa-4c50-99c0-5d6693557f5d-kube-api-access-7wgg9\") pod \"3cb46d14-bcaa-4c50-99c0-5d6693557f5d\" (UID: \"3cb46d14-bcaa-4c50-99c0-5d6693557f5d\") " Dec 01 07:08:17 crc kubenswrapper[4632]: I1201 07:08:17.704847 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3cb46d14-bcaa-4c50-99c0-5d6693557f5d-inventory\") pod \"3cb46d14-bcaa-4c50-99c0-5d6693557f5d\" (UID: \"3cb46d14-bcaa-4c50-99c0-5d6693557f5d\") " Dec 01 07:08:17 crc kubenswrapper[4632]: I1201 07:08:17.710797 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb46d14-bcaa-4c50-99c0-5d6693557f5d-kube-api-access-7wgg9" (OuterVolumeSpecName: "kube-api-access-7wgg9") pod "3cb46d14-bcaa-4c50-99c0-5d6693557f5d" (UID: "3cb46d14-bcaa-4c50-99c0-5d6693557f5d"). InnerVolumeSpecName "kube-api-access-7wgg9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:08:17 crc kubenswrapper[4632]: I1201 07:08:17.728743 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3cb46d14-bcaa-4c50-99c0-5d6693557f5d-inventory" (OuterVolumeSpecName: "inventory") pod "3cb46d14-bcaa-4c50-99c0-5d6693557f5d" (UID: "3cb46d14-bcaa-4c50-99c0-5d6693557f5d"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:08:17 crc kubenswrapper[4632]: I1201 07:08:17.731305 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3cb46d14-bcaa-4c50-99c0-5d6693557f5d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3cb46d14-bcaa-4c50-99c0-5d6693557f5d" (UID: "3cb46d14-bcaa-4c50-99c0-5d6693557f5d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:08:17 crc kubenswrapper[4632]: I1201 07:08:17.807602 4632 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3cb46d14-bcaa-4c50-99c0-5d6693557f5d-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 07:08:17 crc kubenswrapper[4632]: I1201 07:08:17.807638 4632 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3cb46d14-bcaa-4c50-99c0-5d6693557f5d-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 07:08:17 crc kubenswrapper[4632]: I1201 07:08:17.807649 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7wgg9\" (UniqueName: \"kubernetes.io/projected/3cb46d14-bcaa-4c50-99c0-5d6693557f5d-kube-api-access-7wgg9\") on node \"crc\" DevicePath \"\"" Dec 01 07:08:18 crc kubenswrapper[4632]: I1201 07:08:18.327482 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw" event={"ID":"3cb46d14-bcaa-4c50-99c0-5d6693557f5d","Type":"ContainerDied","Data":"3675825593ad297a4221ba317a65a768b93d45b2ed13a733ad0068b5cc846bdd"} Dec 01 07:08:18 crc kubenswrapper[4632]: I1201 07:08:18.328486 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3675825593ad297a4221ba317a65a768b93d45b2ed13a733ad0068b5cc846bdd" Dec 01 07:08:18 crc kubenswrapper[4632]: I1201 07:08:18.327506 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw" Dec 01 07:08:18 crc kubenswrapper[4632]: I1201 07:08:18.329693 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s54pw" event={"ID":"443b42de-b846-47a2-abfa-4807f4e2310c","Type":"ContainerStarted","Data":"c284706192704742c4ccc031930850920e5c30b4615f45419982b45b51b4a3f0"} Dec 01 07:08:18 crc kubenswrapper[4632]: I1201 07:08:18.358436 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-s54pw" podStartSLOduration=2.746009655 podStartE2EDuration="5.358416983s" podCreationTimestamp="2025-12-01 07:08:13 +0000 UTC" firstStartedPulling="2025-12-01 07:08:15.275103455 +0000 UTC m=+1504.840116428" lastFinishedPulling="2025-12-01 07:08:17.887510784 +0000 UTC m=+1507.452523756" observedRunningTime="2025-12-01 07:08:18.349241776 +0000 UTC m=+1507.914254759" watchObservedRunningTime="2025-12-01 07:08:18.358416983 +0000 UTC m=+1507.923429956" Dec 01 07:08:18 crc kubenswrapper[4632]: I1201 07:08:18.403513 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-znccl"] Dec 01 07:08:18 crc kubenswrapper[4632]: E1201 07:08:18.403956 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cb46d14-bcaa-4c50-99c0-5d6693557f5d" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 01 07:08:18 crc kubenswrapper[4632]: I1201 07:08:18.403978 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cb46d14-bcaa-4c50-99c0-5d6693557f5d" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 01 07:08:18 crc kubenswrapper[4632]: I1201 07:08:18.404202 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="3cb46d14-bcaa-4c50-99c0-5d6693557f5d" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 01 07:08:18 crc kubenswrapper[4632]: I1201 07:08:18.404945 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-znccl" Dec 01 07:08:18 crc kubenswrapper[4632]: I1201 07:08:18.406677 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l6tzc" Dec 01 07:08:18 crc kubenswrapper[4632]: I1201 07:08:18.407053 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 07:08:18 crc kubenswrapper[4632]: I1201 07:08:18.408022 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 07:08:18 crc kubenswrapper[4632]: I1201 07:08:18.408200 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 07:08:18 crc kubenswrapper[4632]: I1201 07:08:18.415205 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-znccl"] Dec 01 07:08:18 crc kubenswrapper[4632]: I1201 07:08:18.520892 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af7575c1-47ca-4c63-bdca-1a42d23485ee-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-znccl\" (UID: \"af7575c1-47ca-4c63-bdca-1a42d23485ee\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-znccl" Dec 01 07:08:18 crc kubenswrapper[4632]: I1201 07:08:18.520942 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af7575c1-47ca-4c63-bdca-1a42d23485ee-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-znccl\" (UID: \"af7575c1-47ca-4c63-bdca-1a42d23485ee\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-znccl" Dec 01 07:08:18 crc kubenswrapper[4632]: I1201 07:08:18.521008 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bmj2\" (UniqueName: \"kubernetes.io/projected/af7575c1-47ca-4c63-bdca-1a42d23485ee-kube-api-access-8bmj2\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-znccl\" (UID: \"af7575c1-47ca-4c63-bdca-1a42d23485ee\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-znccl" Dec 01 07:08:18 crc kubenswrapper[4632]: I1201 07:08:18.623362 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af7575c1-47ca-4c63-bdca-1a42d23485ee-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-znccl\" (UID: \"af7575c1-47ca-4c63-bdca-1a42d23485ee\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-znccl" Dec 01 07:08:18 crc kubenswrapper[4632]: I1201 07:08:18.623426 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af7575c1-47ca-4c63-bdca-1a42d23485ee-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-znccl\" (UID: \"af7575c1-47ca-4c63-bdca-1a42d23485ee\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-znccl" Dec 01 07:08:18 crc kubenswrapper[4632]: I1201 07:08:18.623486 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bmj2\" (UniqueName: \"kubernetes.io/projected/af7575c1-47ca-4c63-bdca-1a42d23485ee-kube-api-access-8bmj2\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-znccl\" (UID: \"af7575c1-47ca-4c63-bdca-1a42d23485ee\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-znccl" Dec 01 07:08:18 crc kubenswrapper[4632]: I1201 07:08:18.629844 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af7575c1-47ca-4c63-bdca-1a42d23485ee-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-znccl\" (UID: \"af7575c1-47ca-4c63-bdca-1a42d23485ee\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-znccl" Dec 01 07:08:18 crc kubenswrapper[4632]: I1201 07:08:18.629872 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af7575c1-47ca-4c63-bdca-1a42d23485ee-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-znccl\" (UID: \"af7575c1-47ca-4c63-bdca-1a42d23485ee\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-znccl" Dec 01 07:08:18 crc kubenswrapper[4632]: I1201 07:08:18.637886 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bmj2\" (UniqueName: \"kubernetes.io/projected/af7575c1-47ca-4c63-bdca-1a42d23485ee-kube-api-access-8bmj2\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-znccl\" (UID: \"af7575c1-47ca-4c63-bdca-1a42d23485ee\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-znccl" Dec 01 07:08:18 crc kubenswrapper[4632]: I1201 07:08:18.722757 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-znccl" Dec 01 07:08:19 crc kubenswrapper[4632]: I1201 07:08:19.202653 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-znccl"] Dec 01 07:08:19 crc kubenswrapper[4632]: I1201 07:08:19.360994 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-znccl" event={"ID":"af7575c1-47ca-4c63-bdca-1a42d23485ee","Type":"ContainerStarted","Data":"b6c96c755268185ad5c33806432614bba94ec7e9f14f06cd3441bc33722cd029"} Dec 01 07:08:19 crc kubenswrapper[4632]: I1201 07:08:19.497907 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:08:19 crc kubenswrapper[4632]: I1201 07:08:19.497984 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:08:19 crc kubenswrapper[4632]: I1201 07:08:19.498044 4632 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" Dec 01 07:08:19 crc kubenswrapper[4632]: I1201 07:08:19.498975 4632 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d"} pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" 
containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 07:08:19 crc kubenswrapper[4632]: I1201 07:08:19.499040 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" containerID="cri-o://b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d" gracePeriod=600 Dec 01 07:08:19 crc kubenswrapper[4632]: E1201 07:08:19.626697 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:08:20 crc kubenswrapper[4632]: I1201 07:08:20.372606 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-znccl" event={"ID":"af7575c1-47ca-4c63-bdca-1a42d23485ee","Type":"ContainerStarted","Data":"9de0044e0e0d64a988979becd8c73a39cbd4094b54934aaa5c250c10a61180ea"} Dec 01 07:08:20 crc kubenswrapper[4632]: I1201 07:08:20.375661 4632 generic.go:334] "Generic (PLEG): container finished" podID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerID="b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d" exitCode=0 Dec 01 07:08:20 crc kubenswrapper[4632]: I1201 07:08:20.375698 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerDied","Data":"b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d"} Dec 01 07:08:20 crc kubenswrapper[4632]: I1201 07:08:20.375727 4632 scope.go:117] "RemoveContainer" containerID="11e1fa55914375027dc630d97c2d09c26eb05a0a7d516ea6a156b27687f1b2bb" Dec 01 07:08:20 crc kubenswrapper[4632]: I1201 07:08:20.376127 4632 scope.go:117] "RemoveContainer" containerID="b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d" Dec 01 07:08:20 crc kubenswrapper[4632]: E1201 07:08:20.376368 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:08:20 crc kubenswrapper[4632]: I1201 07:08:20.391928 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-znccl" podStartSLOduration=1.706855075 podStartE2EDuration="2.391917729s" podCreationTimestamp="2025-12-01 07:08:18 +0000 UTC" firstStartedPulling="2025-12-01 07:08:19.212435084 +0000 UTC m=+1508.777448057" lastFinishedPulling="2025-12-01 07:08:19.897497738 +0000 UTC m=+1509.462510711" observedRunningTime="2025-12-01 07:08:20.387473757 +0000 UTC m=+1509.952486751" watchObservedRunningTime="2025-12-01 07:08:20.391917729 +0000 UTC m=+1509.956930703" Dec 01 07:08:24 crc kubenswrapper[4632]: I1201 07:08:24.279563 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/community-operators-s54pw" Dec 01 07:08:24 crc kubenswrapper[4632]: I1201 07:08:24.280391 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-s54pw" Dec 01 07:08:24 crc kubenswrapper[4632]: I1201 07:08:24.313547 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-s54pw" Dec 01 07:08:24 crc kubenswrapper[4632]: I1201 07:08:24.418563 4632 generic.go:334] "Generic (PLEG): container finished" podID="af7575c1-47ca-4c63-bdca-1a42d23485ee" containerID="9de0044e0e0d64a988979becd8c73a39cbd4094b54934aaa5c250c10a61180ea" exitCode=0 Dec 01 07:08:24 crc kubenswrapper[4632]: I1201 07:08:24.418753 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-znccl" event={"ID":"af7575c1-47ca-4c63-bdca-1a42d23485ee","Type":"ContainerDied","Data":"9de0044e0e0d64a988979becd8c73a39cbd4094b54934aaa5c250c10a61180ea"} Dec 01 07:08:24 crc kubenswrapper[4632]: I1201 07:08:24.452944 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-s54pw" Dec 01 07:08:24 crc kubenswrapper[4632]: I1201 07:08:24.547515 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-s54pw"] Dec 01 07:08:25 crc kubenswrapper[4632]: I1201 07:08:25.766433 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-znccl" Dec 01 07:08:25 crc kubenswrapper[4632]: I1201 07:08:25.867438 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af7575c1-47ca-4c63-bdca-1a42d23485ee-inventory\") pod \"af7575c1-47ca-4c63-bdca-1a42d23485ee\" (UID: \"af7575c1-47ca-4c63-bdca-1a42d23485ee\") " Dec 01 07:08:25 crc kubenswrapper[4632]: I1201 07:08:25.867553 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8bmj2\" (UniqueName: \"kubernetes.io/projected/af7575c1-47ca-4c63-bdca-1a42d23485ee-kube-api-access-8bmj2\") pod \"af7575c1-47ca-4c63-bdca-1a42d23485ee\" (UID: \"af7575c1-47ca-4c63-bdca-1a42d23485ee\") " Dec 01 07:08:25 crc kubenswrapper[4632]: I1201 07:08:25.867697 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af7575c1-47ca-4c63-bdca-1a42d23485ee-ssh-key\") pod \"af7575c1-47ca-4c63-bdca-1a42d23485ee\" (UID: \"af7575c1-47ca-4c63-bdca-1a42d23485ee\") " Dec 01 07:08:25 crc kubenswrapper[4632]: I1201 07:08:25.873991 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af7575c1-47ca-4c63-bdca-1a42d23485ee-kube-api-access-8bmj2" (OuterVolumeSpecName: "kube-api-access-8bmj2") pod "af7575c1-47ca-4c63-bdca-1a42d23485ee" (UID: "af7575c1-47ca-4c63-bdca-1a42d23485ee"). InnerVolumeSpecName "kube-api-access-8bmj2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:08:25 crc kubenswrapper[4632]: I1201 07:08:25.894497 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af7575c1-47ca-4c63-bdca-1a42d23485ee-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "af7575c1-47ca-4c63-bdca-1a42d23485ee" (UID: "af7575c1-47ca-4c63-bdca-1a42d23485ee"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:08:25 crc kubenswrapper[4632]: I1201 07:08:25.894975 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/af7575c1-47ca-4c63-bdca-1a42d23485ee-inventory" (OuterVolumeSpecName: "inventory") pod "af7575c1-47ca-4c63-bdca-1a42d23485ee" (UID: "af7575c1-47ca-4c63-bdca-1a42d23485ee"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:08:25 crc kubenswrapper[4632]: I1201 07:08:25.971198 4632 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/af7575c1-47ca-4c63-bdca-1a42d23485ee-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 07:08:25 crc kubenswrapper[4632]: I1201 07:08:25.971344 4632 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/af7575c1-47ca-4c63-bdca-1a42d23485ee-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 07:08:25 crc kubenswrapper[4632]: I1201 07:08:25.971431 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8bmj2\" (UniqueName: \"kubernetes.io/projected/af7575c1-47ca-4c63-bdca-1a42d23485ee-kube-api-access-8bmj2\") on node \"crc\" DevicePath \"\"" Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.444487 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-znccl" Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.444479 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-znccl" event={"ID":"af7575c1-47ca-4c63-bdca-1a42d23485ee","Type":"ContainerDied","Data":"b6c96c755268185ad5c33806432614bba94ec7e9f14f06cd3441bc33722cd029"} Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.444545 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b6c96c755268185ad5c33806432614bba94ec7e9f14f06cd3441bc33722cd029" Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.444597 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-s54pw" podUID="443b42de-b846-47a2-abfa-4807f4e2310c" containerName="registry-server" containerID="cri-o://c284706192704742c4ccc031930850920e5c30b4615f45419982b45b51b4a3f0" gracePeriod=2 Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.487224 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-9frjq"] Dec 01 07:08:26 crc kubenswrapper[4632]: E1201 07:08:26.487705 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af7575c1-47ca-4c63-bdca-1a42d23485ee" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.487722 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="af7575c1-47ca-4c63-bdca-1a42d23485ee" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.487907 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="af7575c1-47ca-4c63-bdca-1a42d23485ee" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.488574 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9frjq" Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.490414 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l6tzc" Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.491412 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.491429 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.491431 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.498454 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-9frjq"] Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.585722 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2421bc45-8d08-4634-861d-e3f185b01e54-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9frjq\" (UID: \"2421bc45-8d08-4634-861d-e3f185b01e54\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9frjq" Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.586098 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2r4b\" (UniqueName: \"kubernetes.io/projected/2421bc45-8d08-4634-861d-e3f185b01e54-kube-api-access-f2r4b\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9frjq\" (UID: \"2421bc45-8d08-4634-861d-e3f185b01e54\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9frjq" Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.586143 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2421bc45-8d08-4634-861d-e3f185b01e54-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9frjq\" (UID: \"2421bc45-8d08-4634-861d-e3f185b01e54\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9frjq" Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.689278 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2r4b\" (UniqueName: \"kubernetes.io/projected/2421bc45-8d08-4634-861d-e3f185b01e54-kube-api-access-f2r4b\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9frjq\" (UID: \"2421bc45-8d08-4634-861d-e3f185b01e54\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9frjq" Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.689363 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2421bc45-8d08-4634-861d-e3f185b01e54-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9frjq\" (UID: \"2421bc45-8d08-4634-861d-e3f185b01e54\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9frjq" Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.689571 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2421bc45-8d08-4634-861d-e3f185b01e54-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9frjq\" (UID: 
\"2421bc45-8d08-4634-861d-e3f185b01e54\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9frjq" Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.696175 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2421bc45-8d08-4634-861d-e3f185b01e54-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9frjq\" (UID: \"2421bc45-8d08-4634-861d-e3f185b01e54\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9frjq" Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.700446 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2421bc45-8d08-4634-861d-e3f185b01e54-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9frjq\" (UID: \"2421bc45-8d08-4634-861d-e3f185b01e54\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9frjq" Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.704950 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2r4b\" (UniqueName: \"kubernetes.io/projected/2421bc45-8d08-4634-861d-e3f185b01e54-kube-api-access-f2r4b\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-9frjq\" (UID: \"2421bc45-8d08-4634-861d-e3f185b01e54\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9frjq" Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.845787 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9frjq" Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.933650 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s54pw" Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.994528 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gn48s\" (UniqueName: \"kubernetes.io/projected/443b42de-b846-47a2-abfa-4807f4e2310c-kube-api-access-gn48s\") pod \"443b42de-b846-47a2-abfa-4807f4e2310c\" (UID: \"443b42de-b846-47a2-abfa-4807f4e2310c\") " Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.994590 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/443b42de-b846-47a2-abfa-4807f4e2310c-utilities\") pod \"443b42de-b846-47a2-abfa-4807f4e2310c\" (UID: \"443b42de-b846-47a2-abfa-4807f4e2310c\") " Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.994614 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/443b42de-b846-47a2-abfa-4807f4e2310c-catalog-content\") pod \"443b42de-b846-47a2-abfa-4807f4e2310c\" (UID: \"443b42de-b846-47a2-abfa-4807f4e2310c\") " Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.996331 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/443b42de-b846-47a2-abfa-4807f4e2310c-utilities" (OuterVolumeSpecName: "utilities") pod "443b42de-b846-47a2-abfa-4807f4e2310c" (UID: "443b42de-b846-47a2-abfa-4807f4e2310c"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:08:26 crc kubenswrapper[4632]: I1201 07:08:26.998943 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/443b42de-b846-47a2-abfa-4807f4e2310c-kube-api-access-gn48s" (OuterVolumeSpecName: "kube-api-access-gn48s") pod "443b42de-b846-47a2-abfa-4807f4e2310c" (UID: "443b42de-b846-47a2-abfa-4807f4e2310c"). InnerVolumeSpecName "kube-api-access-gn48s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:08:27 crc kubenswrapper[4632]: I1201 07:08:27.036891 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/443b42de-b846-47a2-abfa-4807f4e2310c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "443b42de-b846-47a2-abfa-4807f4e2310c" (UID: "443b42de-b846-47a2-abfa-4807f4e2310c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:08:27 crc kubenswrapper[4632]: I1201 07:08:27.097379 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/443b42de-b846-47a2-abfa-4807f4e2310c-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:08:27 crc kubenswrapper[4632]: I1201 07:08:27.097415 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/443b42de-b846-47a2-abfa-4807f4e2310c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:08:27 crc kubenswrapper[4632]: I1201 07:08:27.097429 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gn48s\" (UniqueName: \"kubernetes.io/projected/443b42de-b846-47a2-abfa-4807f4e2310c-kube-api-access-gn48s\") on node \"crc\" DevicePath \"\"" Dec 01 07:08:27 crc kubenswrapper[4632]: I1201 07:08:27.390433 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-9frjq"] Dec 01 07:08:27 crc kubenswrapper[4632]: W1201 07:08:27.391211 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2421bc45_8d08_4634_861d_e3f185b01e54.slice/crio-a1529490e471d62c09f1d0c3138e1fa03d057c11f32771371ec83f81efe60292 WatchSource:0}: Error finding container a1529490e471d62c09f1d0c3138e1fa03d057c11f32771371ec83f81efe60292: Status 404 returned error can't find the container with id a1529490e471d62c09f1d0c3138e1fa03d057c11f32771371ec83f81efe60292 Dec 01 07:08:27 crc kubenswrapper[4632]: I1201 07:08:27.454738 4632 generic.go:334] "Generic (PLEG): container finished" podID="443b42de-b846-47a2-abfa-4807f4e2310c" containerID="c284706192704742c4ccc031930850920e5c30b4615f45419982b45b51b4a3f0" exitCode=0 Dec 01 07:08:27 crc kubenswrapper[4632]: I1201 07:08:27.454799 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s54pw" event={"ID":"443b42de-b846-47a2-abfa-4807f4e2310c","Type":"ContainerDied","Data":"c284706192704742c4ccc031930850920e5c30b4615f45419982b45b51b4a3f0"} Dec 01 07:08:27 crc kubenswrapper[4632]: I1201 07:08:27.454843 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-s54pw" Dec 01 07:08:27 crc kubenswrapper[4632]: I1201 07:08:27.454865 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s54pw" event={"ID":"443b42de-b846-47a2-abfa-4807f4e2310c","Type":"ContainerDied","Data":"3c26cb6a09d7a841fd815c437da6e630b5cbd49d655a3d60f5729a75e401d5b4"} Dec 01 07:08:27 crc kubenswrapper[4632]: I1201 07:08:27.454893 4632 scope.go:117] "RemoveContainer" containerID="c284706192704742c4ccc031930850920e5c30b4615f45419982b45b51b4a3f0" Dec 01 07:08:27 crc kubenswrapper[4632]: I1201 07:08:27.456045 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9frjq" event={"ID":"2421bc45-8d08-4634-861d-e3f185b01e54","Type":"ContainerStarted","Data":"a1529490e471d62c09f1d0c3138e1fa03d057c11f32771371ec83f81efe60292"} Dec 01 07:08:27 crc kubenswrapper[4632]: I1201 07:08:27.473417 4632 scope.go:117] "RemoveContainer" containerID="8ea5663c79071332ba8f36bb0d77fb37ac265349deaa44e0f655434720363ac0" Dec 01 07:08:27 crc kubenswrapper[4632]: I1201 07:08:27.492321 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-s54pw"] Dec 01 07:08:27 crc kubenswrapper[4632]: I1201 07:08:27.499199 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-s54pw"] Dec 01 07:08:27 crc kubenswrapper[4632]: I1201 07:08:27.510648 4632 scope.go:117] "RemoveContainer" containerID="4cca9846311da6a4d882e0b3413bd59e9256f41e5244d89ec040b0f73df024fc" Dec 01 07:08:27 crc kubenswrapper[4632]: I1201 07:08:27.530052 4632 scope.go:117] "RemoveContainer" containerID="c284706192704742c4ccc031930850920e5c30b4615f45419982b45b51b4a3f0" Dec 01 07:08:27 crc kubenswrapper[4632]: E1201 07:08:27.530552 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c284706192704742c4ccc031930850920e5c30b4615f45419982b45b51b4a3f0\": container with ID starting with c284706192704742c4ccc031930850920e5c30b4615f45419982b45b51b4a3f0 not found: ID does not exist" containerID="c284706192704742c4ccc031930850920e5c30b4615f45419982b45b51b4a3f0" Dec 01 07:08:27 crc kubenswrapper[4632]: I1201 07:08:27.530596 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c284706192704742c4ccc031930850920e5c30b4615f45419982b45b51b4a3f0"} err="failed to get container status \"c284706192704742c4ccc031930850920e5c30b4615f45419982b45b51b4a3f0\": rpc error: code = NotFound desc = could not find container \"c284706192704742c4ccc031930850920e5c30b4615f45419982b45b51b4a3f0\": container with ID starting with c284706192704742c4ccc031930850920e5c30b4615f45419982b45b51b4a3f0 not found: ID does not exist" Dec 01 07:08:27 crc kubenswrapper[4632]: I1201 07:08:27.530626 4632 scope.go:117] "RemoveContainer" containerID="8ea5663c79071332ba8f36bb0d77fb37ac265349deaa44e0f655434720363ac0" Dec 01 07:08:27 crc kubenswrapper[4632]: E1201 07:08:27.530967 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ea5663c79071332ba8f36bb0d77fb37ac265349deaa44e0f655434720363ac0\": container with ID starting with 8ea5663c79071332ba8f36bb0d77fb37ac265349deaa44e0f655434720363ac0 not found: ID does not exist" containerID="8ea5663c79071332ba8f36bb0d77fb37ac265349deaa44e0f655434720363ac0" Dec 01 07:08:27 crc kubenswrapper[4632]: I1201 
07:08:27.531007 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ea5663c79071332ba8f36bb0d77fb37ac265349deaa44e0f655434720363ac0"} err="failed to get container status \"8ea5663c79071332ba8f36bb0d77fb37ac265349deaa44e0f655434720363ac0\": rpc error: code = NotFound desc = could not find container \"8ea5663c79071332ba8f36bb0d77fb37ac265349deaa44e0f655434720363ac0\": container with ID starting with 8ea5663c79071332ba8f36bb0d77fb37ac265349deaa44e0f655434720363ac0 not found: ID does not exist" Dec 01 07:08:27 crc kubenswrapper[4632]: I1201 07:08:27.531033 4632 scope.go:117] "RemoveContainer" containerID="4cca9846311da6a4d882e0b3413bd59e9256f41e5244d89ec040b0f73df024fc" Dec 01 07:08:27 crc kubenswrapper[4632]: E1201 07:08:27.531614 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4cca9846311da6a4d882e0b3413bd59e9256f41e5244d89ec040b0f73df024fc\": container with ID starting with 4cca9846311da6a4d882e0b3413bd59e9256f41e5244d89ec040b0f73df024fc not found: ID does not exist" containerID="4cca9846311da6a4d882e0b3413bd59e9256f41e5244d89ec040b0f73df024fc" Dec 01 07:08:27 crc kubenswrapper[4632]: I1201 07:08:27.531636 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4cca9846311da6a4d882e0b3413bd59e9256f41e5244d89ec040b0f73df024fc"} err="failed to get container status \"4cca9846311da6a4d882e0b3413bd59e9256f41e5244d89ec040b0f73df024fc\": rpc error: code = NotFound desc = could not find container \"4cca9846311da6a4d882e0b3413bd59e9256f41e5244d89ec040b0f73df024fc\": container with ID starting with 4cca9846311da6a4d882e0b3413bd59e9256f41e5244d89ec040b0f73df024fc not found: ID does not exist" Dec 01 07:08:28 crc kubenswrapper[4632]: I1201 07:08:28.467645 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9frjq" event={"ID":"2421bc45-8d08-4634-861d-e3f185b01e54","Type":"ContainerStarted","Data":"53226abd14553c0649ca4c6218ace5a354fbe3f178d91e12512b3b8abc1ef17a"} Dec 01 07:08:28 crc kubenswrapper[4632]: I1201 07:08:28.489366 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9frjq" podStartSLOduration=1.957179218 podStartE2EDuration="2.489328435s" podCreationTimestamp="2025-12-01 07:08:26 +0000 UTC" firstStartedPulling="2025-12-01 07:08:27.393788202 +0000 UTC m=+1516.958801175" lastFinishedPulling="2025-12-01 07:08:27.925937419 +0000 UTC m=+1517.490950392" observedRunningTime="2025-12-01 07:08:28.480452723 +0000 UTC m=+1518.045465696" watchObservedRunningTime="2025-12-01 07:08:28.489328435 +0000 UTC m=+1518.054341408" Dec 01 07:08:28 crc kubenswrapper[4632]: I1201 07:08:28.762520 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="443b42de-b846-47a2-abfa-4807f4e2310c" path="/var/lib/kubelet/pods/443b42de-b846-47a2-abfa-4807f4e2310c/volumes" Dec 01 07:08:31 crc kubenswrapper[4632]: I1201 07:08:31.028606 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-7q66w"] Dec 01 07:08:31 crc kubenswrapper[4632]: I1201 07:08:31.037248 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-7q66w"] Dec 01 07:08:32 crc kubenswrapper[4632]: I1201 07:08:32.760288 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b176a21-27cf-4608-8787-e91a914be7cb" 
path="/var/lib/kubelet/pods/6b176a21-27cf-4608-8787-e91a914be7cb/volumes" Dec 01 07:08:33 crc kubenswrapper[4632]: I1201 07:08:33.750018 4632 scope.go:117] "RemoveContainer" containerID="b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d" Dec 01 07:08:33 crc kubenswrapper[4632]: E1201 07:08:33.750748 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:08:48 crc kubenswrapper[4632]: I1201 07:08:48.750063 4632 scope.go:117] "RemoveContainer" containerID="b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d" Dec 01 07:08:48 crc kubenswrapper[4632]: E1201 07:08:48.751004 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.042098 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-fnpks"] Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.059343 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-x94rl"] Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.065123 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-05f8-account-create-update-wr5n9"] Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.073491 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-fnpks"] Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.077306 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-46f1-account-create-update-5zv72"] Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.084449 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-pmhkj"] Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.089546 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-05bc-account-create-update-58k9r"] Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.094162 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-x94rl"] Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.098723 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-46f1-account-create-update-5zv72"] Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.103489 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-05f8-account-create-update-wr5n9"] Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.108130 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-pmhkj"] Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.112692 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-05bc-account-create-update-58k9r"] Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.249171 4632 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["openshift-marketplace/certified-operators-d84tf"] Dec 01 07:08:52 crc kubenswrapper[4632]: E1201 07:08:52.249660 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="443b42de-b846-47a2-abfa-4807f4e2310c" containerName="registry-server" Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.249683 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="443b42de-b846-47a2-abfa-4807f4e2310c" containerName="registry-server" Dec 01 07:08:52 crc kubenswrapper[4632]: E1201 07:08:52.249697 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="443b42de-b846-47a2-abfa-4807f4e2310c" containerName="extract-utilities" Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.249726 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="443b42de-b846-47a2-abfa-4807f4e2310c" containerName="extract-utilities" Dec 01 07:08:52 crc kubenswrapper[4632]: E1201 07:08:52.249755 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="443b42de-b846-47a2-abfa-4807f4e2310c" containerName="extract-content" Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.249761 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="443b42de-b846-47a2-abfa-4807f4e2310c" containerName="extract-content" Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.249970 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="443b42de-b846-47a2-abfa-4807f4e2310c" containerName="registry-server" Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.252238 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-d84tf" Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.260479 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-d84tf"] Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.364368 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gdjq\" (UniqueName: \"kubernetes.io/projected/6c9b8d4b-67dc-42e2-b3a9-ac647c455d26-kube-api-access-9gdjq\") pod \"certified-operators-d84tf\" (UID: \"6c9b8d4b-67dc-42e2-b3a9-ac647c455d26\") " pod="openshift-marketplace/certified-operators-d84tf" Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.364452 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c9b8d4b-67dc-42e2-b3a9-ac647c455d26-catalog-content\") pod \"certified-operators-d84tf\" (UID: \"6c9b8d4b-67dc-42e2-b3a9-ac647c455d26\") " pod="openshift-marketplace/certified-operators-d84tf" Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.364478 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c9b8d4b-67dc-42e2-b3a9-ac647c455d26-utilities\") pod \"certified-operators-d84tf\" (UID: \"6c9b8d4b-67dc-42e2-b3a9-ac647c455d26\") " pod="openshift-marketplace/certified-operators-d84tf" Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.466479 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gdjq\" (UniqueName: \"kubernetes.io/projected/6c9b8d4b-67dc-42e2-b3a9-ac647c455d26-kube-api-access-9gdjq\") pod \"certified-operators-d84tf\" (UID: \"6c9b8d4b-67dc-42e2-b3a9-ac647c455d26\") " pod="openshift-marketplace/certified-operators-d84tf" Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 
07:08:52.466624 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c9b8d4b-67dc-42e2-b3a9-ac647c455d26-catalog-content\") pod \"certified-operators-d84tf\" (UID: \"6c9b8d4b-67dc-42e2-b3a9-ac647c455d26\") " pod="openshift-marketplace/certified-operators-d84tf" Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.466648 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c9b8d4b-67dc-42e2-b3a9-ac647c455d26-utilities\") pod \"certified-operators-d84tf\" (UID: \"6c9b8d4b-67dc-42e2-b3a9-ac647c455d26\") " pod="openshift-marketplace/certified-operators-d84tf" Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.467325 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c9b8d4b-67dc-42e2-b3a9-ac647c455d26-catalog-content\") pod \"certified-operators-d84tf\" (UID: \"6c9b8d4b-67dc-42e2-b3a9-ac647c455d26\") " pod="openshift-marketplace/certified-operators-d84tf" Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.467396 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c9b8d4b-67dc-42e2-b3a9-ac647c455d26-utilities\") pod \"certified-operators-d84tf\" (UID: \"6c9b8d4b-67dc-42e2-b3a9-ac647c455d26\") " pod="openshift-marketplace/certified-operators-d84tf" Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.486401 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gdjq\" (UniqueName: \"kubernetes.io/projected/6c9b8d4b-67dc-42e2-b3a9-ac647c455d26-kube-api-access-9gdjq\") pod \"certified-operators-d84tf\" (UID: \"6c9b8d4b-67dc-42e2-b3a9-ac647c455d26\") " pod="openshift-marketplace/certified-operators-d84tf" Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.571696 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-d84tf" Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.768037 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="227dea0e-053e-4c4e-a209-5b6ad4f9145f" path="/var/lib/kubelet/pods/227dea0e-053e-4c4e-a209-5b6ad4f9145f/volumes" Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.769797 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="484f5acc-9309-4561-8055-1fc5df33e183" path="/var/lib/kubelet/pods/484f5acc-9309-4561-8055-1fc5df33e183/volumes" Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.770436 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80322b90-d8de-4544-8962-3761a3d13e03" path="/var/lib/kubelet/pods/80322b90-d8de-4544-8962-3761a3d13e03/volumes" Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.770971 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7cf908d-5642-4d23-9874-d4b7b1d3f323" path="/var/lib/kubelet/pods/b7cf908d-5642-4d23-9874-d4b7b1d3f323/volumes" Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.771965 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e705f257-798d-43b0-985c-bf3499b2c720" path="/var/lib/kubelet/pods/e705f257-798d-43b0-985c-bf3499b2c720/volumes" Dec 01 07:08:52 crc kubenswrapper[4632]: I1201 07:08:52.772543 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbecd90a-c90a-4fe8-a349-7701b7256955" path="/var/lib/kubelet/pods/fbecd90a-c90a-4fe8-a349-7701b7256955/volumes" Dec 01 07:08:53 crc kubenswrapper[4632]: I1201 07:08:53.057056 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-d84tf"] Dec 01 07:08:53 crc kubenswrapper[4632]: I1201 07:08:53.689793 4632 generic.go:334] "Generic (PLEG): container finished" podID="6c9b8d4b-67dc-42e2-b3a9-ac647c455d26" containerID="13822b4377f095ce48e333e62a4a54914ba1cd9f9cafc80e7392cef9855573b5" exitCode=0 Dec 01 07:08:53 crc kubenswrapper[4632]: I1201 07:08:53.689930 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d84tf" event={"ID":"6c9b8d4b-67dc-42e2-b3a9-ac647c455d26","Type":"ContainerDied","Data":"13822b4377f095ce48e333e62a4a54914ba1cd9f9cafc80e7392cef9855573b5"} Dec 01 07:08:53 crc kubenswrapper[4632]: I1201 07:08:53.690132 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d84tf" event={"ID":"6c9b8d4b-67dc-42e2-b3a9-ac647c455d26","Type":"ContainerStarted","Data":"2b921228051fa74616c595dc419ea3f4cab9b43448799843135214b6df78f1ff"} Dec 01 07:08:54 crc kubenswrapper[4632]: I1201 07:08:54.700884 4632 generic.go:334] "Generic (PLEG): container finished" podID="2421bc45-8d08-4634-861d-e3f185b01e54" containerID="53226abd14553c0649ca4c6218ace5a354fbe3f178d91e12512b3b8abc1ef17a" exitCode=0 Dec 01 07:08:54 crc kubenswrapper[4632]: I1201 07:08:54.700964 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9frjq" event={"ID":"2421bc45-8d08-4634-861d-e3f185b01e54","Type":"ContainerDied","Data":"53226abd14553c0649ca4c6218ace5a354fbe3f178d91e12512b3b8abc1ef17a"} Dec 01 07:08:55 crc kubenswrapper[4632]: E1201 07:08:55.052263 4632 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6c9b8d4b_67dc_42e2_b3a9_ac647c455d26.slice/crio-074c203c0888f61f9bcd5e5b2012e3a18d8a58a6439383f0cac0c93448b165d1.scope\": RecentStats: unable to find data in memory cache]" Dec 01 07:08:55 crc kubenswrapper[4632]: I1201 07:08:55.712766 4632 generic.go:334] "Generic (PLEG): container finished" podID="6c9b8d4b-67dc-42e2-b3a9-ac647c455d26" containerID="074c203c0888f61f9bcd5e5b2012e3a18d8a58a6439383f0cac0c93448b165d1" exitCode=0 Dec 01 07:08:55 crc kubenswrapper[4632]: I1201 07:08:55.712864 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d84tf" event={"ID":"6c9b8d4b-67dc-42e2-b3a9-ac647c455d26","Type":"ContainerDied","Data":"074c203c0888f61f9bcd5e5b2012e3a18d8a58a6439383f0cac0c93448b165d1"} Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.046604 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9frjq" Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.242175 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2421bc45-8d08-4634-861d-e3f185b01e54-inventory\") pod \"2421bc45-8d08-4634-861d-e3f185b01e54\" (UID: \"2421bc45-8d08-4634-861d-e3f185b01e54\") " Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.242254 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f2r4b\" (UniqueName: \"kubernetes.io/projected/2421bc45-8d08-4634-861d-e3f185b01e54-kube-api-access-f2r4b\") pod \"2421bc45-8d08-4634-861d-e3f185b01e54\" (UID: \"2421bc45-8d08-4634-861d-e3f185b01e54\") " Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.242447 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2421bc45-8d08-4634-861d-e3f185b01e54-ssh-key\") pod \"2421bc45-8d08-4634-861d-e3f185b01e54\" (UID: \"2421bc45-8d08-4634-861d-e3f185b01e54\") " Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.249982 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2421bc45-8d08-4634-861d-e3f185b01e54-kube-api-access-f2r4b" (OuterVolumeSpecName: "kube-api-access-f2r4b") pod "2421bc45-8d08-4634-861d-e3f185b01e54" (UID: "2421bc45-8d08-4634-861d-e3f185b01e54"). InnerVolumeSpecName "kube-api-access-f2r4b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.270891 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2421bc45-8d08-4634-861d-e3f185b01e54-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2421bc45-8d08-4634-861d-e3f185b01e54" (UID: "2421bc45-8d08-4634-861d-e3f185b01e54"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.271006 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2421bc45-8d08-4634-861d-e3f185b01e54-inventory" (OuterVolumeSpecName: "inventory") pod "2421bc45-8d08-4634-861d-e3f185b01e54" (UID: "2421bc45-8d08-4634-861d-e3f185b01e54"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.345726 4632 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2421bc45-8d08-4634-861d-e3f185b01e54-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.345764 4632 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2421bc45-8d08-4634-861d-e3f185b01e54-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.345775 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f2r4b\" (UniqueName: \"kubernetes.io/projected/2421bc45-8d08-4634-861d-e3f185b01e54-kube-api-access-f2r4b\") on node \"crc\" DevicePath \"\"" Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.724240 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9frjq" event={"ID":"2421bc45-8d08-4634-861d-e3f185b01e54","Type":"ContainerDied","Data":"a1529490e471d62c09f1d0c3138e1fa03d057c11f32771371ec83f81efe60292"} Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.724584 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a1529490e471d62c09f1d0c3138e1fa03d057c11f32771371ec83f81efe60292" Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.724300 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-9frjq" Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.727373 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d84tf" event={"ID":"6c9b8d4b-67dc-42e2-b3a9-ac647c455d26","Type":"ContainerStarted","Data":"7aee414835c241037087b9dd85984dde222360ea86d76a71e1edb4faabf8ca02"} Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.779167 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-d84tf" podStartSLOduration=2.178124119 podStartE2EDuration="4.779142914s" podCreationTimestamp="2025-12-01 07:08:52 +0000 UTC" firstStartedPulling="2025-12-01 07:08:53.691259848 +0000 UTC m=+1543.256272812" lastFinishedPulling="2025-12-01 07:08:56.292278634 +0000 UTC m=+1545.857291607" observedRunningTime="2025-12-01 07:08:56.742396623 +0000 UTC m=+1546.307409596" watchObservedRunningTime="2025-12-01 07:08:56.779142914 +0000 UTC m=+1546.344155888" Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.806763 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-bp886"] Dec 01 07:08:56 crc kubenswrapper[4632]: E1201 07:08:56.807479 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2421bc45-8d08-4634-861d-e3f185b01e54" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.807609 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="2421bc45-8d08-4634-861d-e3f185b01e54" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.807937 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="2421bc45-8d08-4634-861d-e3f185b01e54" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.808753 4632 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-bp886" Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.810859 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.811112 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.811286 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.811504 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l6tzc" Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.819635 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-bp886"] Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.872627 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjm7w\" (UniqueName: \"kubernetes.io/projected/c30c10f4-9c67-4caf-8858-a2e74307ee33-kube-api-access-gjm7w\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-bp886\" (UID: \"c30c10f4-9c67-4caf-8858-a2e74307ee33\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-bp886" Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.872676 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c30c10f4-9c67-4caf-8858-a2e74307ee33-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-bp886\" (UID: \"c30c10f4-9c67-4caf-8858-a2e74307ee33\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-bp886" Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.872982 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c30c10f4-9c67-4caf-8858-a2e74307ee33-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-bp886\" (UID: \"c30c10f4-9c67-4caf-8858-a2e74307ee33\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-bp886" Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.974952 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjm7w\" (UniqueName: \"kubernetes.io/projected/c30c10f4-9c67-4caf-8858-a2e74307ee33-kube-api-access-gjm7w\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-bp886\" (UID: \"c30c10f4-9c67-4caf-8858-a2e74307ee33\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-bp886" Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.975232 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c30c10f4-9c67-4caf-8858-a2e74307ee33-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-bp886\" (UID: \"c30c10f4-9c67-4caf-8858-a2e74307ee33\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-bp886" Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.975404 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c30c10f4-9c67-4caf-8858-a2e74307ee33-inventory\") pod 
\"configure-os-edpm-deployment-openstack-edpm-ipam-bp886\" (UID: \"c30c10f4-9c67-4caf-8858-a2e74307ee33\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-bp886" Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.981209 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c30c10f4-9c67-4caf-8858-a2e74307ee33-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-bp886\" (UID: \"c30c10f4-9c67-4caf-8858-a2e74307ee33\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-bp886" Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.981749 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c30c10f4-9c67-4caf-8858-a2e74307ee33-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-bp886\" (UID: \"c30c10f4-9c67-4caf-8858-a2e74307ee33\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-bp886" Dec 01 07:08:56 crc kubenswrapper[4632]: I1201 07:08:56.990366 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjm7w\" (UniqueName: \"kubernetes.io/projected/c30c10f4-9c67-4caf-8858-a2e74307ee33-kube-api-access-gjm7w\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-bp886\" (UID: \"c30c10f4-9c67-4caf-8858-a2e74307ee33\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-bp886" Dec 01 07:08:57 crc kubenswrapper[4632]: I1201 07:08:57.127484 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-bp886" Dec 01 07:08:57 crc kubenswrapper[4632]: I1201 07:08:57.599943 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-bp886"] Dec 01 07:08:57 crc kubenswrapper[4632]: I1201 07:08:57.738732 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-bp886" event={"ID":"c30c10f4-9c67-4caf-8858-a2e74307ee33","Type":"ContainerStarted","Data":"2010f8c0bc09fdafe6e4dc6402654389ae103b607880ec8515642a7991858933"} Dec 01 07:08:58 crc kubenswrapper[4632]: I1201 07:08:58.766745 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-bp886" event={"ID":"c30c10f4-9c67-4caf-8858-a2e74307ee33","Type":"ContainerStarted","Data":"5ca69d66d92f4753bd73fcf6219fbeec7d694987ee124b629a821dd79ab96146"} Dec 01 07:08:58 crc kubenswrapper[4632]: I1201 07:08:58.773919 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-bp886" podStartSLOduration=2.149573399 podStartE2EDuration="2.773896125s" podCreationTimestamp="2025-12-01 07:08:56 +0000 UTC" firstStartedPulling="2025-12-01 07:08:57.604401227 +0000 UTC m=+1547.169414200" lastFinishedPulling="2025-12-01 07:08:58.228723953 +0000 UTC m=+1547.793736926" observedRunningTime="2025-12-01 07:08:58.769926557 +0000 UTC m=+1548.334939519" watchObservedRunningTime="2025-12-01 07:08:58.773896125 +0000 UTC m=+1548.338909098" Dec 01 07:09:02 crc kubenswrapper[4632]: I1201 07:09:02.573154 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-d84tf" Dec 01 07:09:02 crc kubenswrapper[4632]: I1201 07:09:02.573630 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/certified-operators-d84tf" Dec 01 07:09:02 crc kubenswrapper[4632]: I1201 07:09:02.615957 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-d84tf" Dec 01 07:09:02 crc kubenswrapper[4632]: I1201 07:09:02.750445 4632 scope.go:117] "RemoveContainer" containerID="b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d" Dec 01 07:09:02 crc kubenswrapper[4632]: E1201 07:09:02.750722 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:09:02 crc kubenswrapper[4632]: I1201 07:09:02.821556 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-d84tf" Dec 01 07:09:02 crc kubenswrapper[4632]: I1201 07:09:02.860537 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-d84tf"] Dec 01 07:09:04 crc kubenswrapper[4632]: I1201 07:09:04.807042 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-d84tf" podUID="6c9b8d4b-67dc-42e2-b3a9-ac647c455d26" containerName="registry-server" containerID="cri-o://7aee414835c241037087b9dd85984dde222360ea86d76a71e1edb4faabf8ca02" gracePeriod=2 Dec 01 07:09:05 crc kubenswrapper[4632]: I1201 07:09:05.203453 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-d84tf" Dec 01 07:09:05 crc kubenswrapper[4632]: I1201 07:09:05.334880 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c9b8d4b-67dc-42e2-b3a9-ac647c455d26-catalog-content\") pod \"6c9b8d4b-67dc-42e2-b3a9-ac647c455d26\" (UID: \"6c9b8d4b-67dc-42e2-b3a9-ac647c455d26\") " Dec 01 07:09:05 crc kubenswrapper[4632]: I1201 07:09:05.335263 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c9b8d4b-67dc-42e2-b3a9-ac647c455d26-utilities\") pod \"6c9b8d4b-67dc-42e2-b3a9-ac647c455d26\" (UID: \"6c9b8d4b-67dc-42e2-b3a9-ac647c455d26\") " Dec 01 07:09:05 crc kubenswrapper[4632]: I1201 07:09:05.335327 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9gdjq\" (UniqueName: \"kubernetes.io/projected/6c9b8d4b-67dc-42e2-b3a9-ac647c455d26-kube-api-access-9gdjq\") pod \"6c9b8d4b-67dc-42e2-b3a9-ac647c455d26\" (UID: \"6c9b8d4b-67dc-42e2-b3a9-ac647c455d26\") " Dec 01 07:09:05 crc kubenswrapper[4632]: I1201 07:09:05.335888 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c9b8d4b-67dc-42e2-b3a9-ac647c455d26-utilities" (OuterVolumeSpecName: "utilities") pod "6c9b8d4b-67dc-42e2-b3a9-ac647c455d26" (UID: "6c9b8d4b-67dc-42e2-b3a9-ac647c455d26"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:09:05 crc kubenswrapper[4632]: I1201 07:09:05.336295 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c9b8d4b-67dc-42e2-b3a9-ac647c455d26-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:09:05 crc kubenswrapper[4632]: I1201 07:09:05.342511 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c9b8d4b-67dc-42e2-b3a9-ac647c455d26-kube-api-access-9gdjq" (OuterVolumeSpecName: "kube-api-access-9gdjq") pod "6c9b8d4b-67dc-42e2-b3a9-ac647c455d26" (UID: "6c9b8d4b-67dc-42e2-b3a9-ac647c455d26"). InnerVolumeSpecName "kube-api-access-9gdjq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:09:05 crc kubenswrapper[4632]: I1201 07:09:05.377688 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c9b8d4b-67dc-42e2-b3a9-ac647c455d26-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6c9b8d4b-67dc-42e2-b3a9-ac647c455d26" (UID: "6c9b8d4b-67dc-42e2-b3a9-ac647c455d26"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:09:05 crc kubenswrapper[4632]: I1201 07:09:05.438221 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c9b8d4b-67dc-42e2-b3a9-ac647c455d26-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:09:05 crc kubenswrapper[4632]: I1201 07:09:05.438253 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9gdjq\" (UniqueName: \"kubernetes.io/projected/6c9b8d4b-67dc-42e2-b3a9-ac647c455d26-kube-api-access-9gdjq\") on node \"crc\" DevicePath \"\"" Dec 01 07:09:05 crc kubenswrapper[4632]: I1201 07:09:05.824606 4632 generic.go:334] "Generic (PLEG): container finished" podID="6c9b8d4b-67dc-42e2-b3a9-ac647c455d26" containerID="7aee414835c241037087b9dd85984dde222360ea86d76a71e1edb4faabf8ca02" exitCode=0 Dec 01 07:09:05 crc kubenswrapper[4632]: I1201 07:09:05.824645 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-d84tf" Dec 01 07:09:05 crc kubenswrapper[4632]: I1201 07:09:05.824654 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d84tf" event={"ID":"6c9b8d4b-67dc-42e2-b3a9-ac647c455d26","Type":"ContainerDied","Data":"7aee414835c241037087b9dd85984dde222360ea86d76a71e1edb4faabf8ca02"} Dec 01 07:09:05 crc kubenswrapper[4632]: I1201 07:09:05.824682 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d84tf" event={"ID":"6c9b8d4b-67dc-42e2-b3a9-ac647c455d26","Type":"ContainerDied","Data":"2b921228051fa74616c595dc419ea3f4cab9b43448799843135214b6df78f1ff"} Dec 01 07:09:05 crc kubenswrapper[4632]: I1201 07:09:05.824701 4632 scope.go:117] "RemoveContainer" containerID="7aee414835c241037087b9dd85984dde222360ea86d76a71e1edb4faabf8ca02" Dec 01 07:09:05 crc kubenswrapper[4632]: I1201 07:09:05.841953 4632 scope.go:117] "RemoveContainer" containerID="074c203c0888f61f9bcd5e5b2012e3a18d8a58a6439383f0cac0c93448b165d1" Dec 01 07:09:05 crc kubenswrapper[4632]: I1201 07:09:05.851845 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-d84tf"] Dec 01 07:09:05 crc kubenswrapper[4632]: I1201 07:09:05.858707 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-d84tf"] Dec 01 07:09:05 crc kubenswrapper[4632]: I1201 07:09:05.880154 4632 scope.go:117] "RemoveContainer" containerID="13822b4377f095ce48e333e62a4a54914ba1cd9f9cafc80e7392cef9855573b5" Dec 01 07:09:05 crc kubenswrapper[4632]: I1201 07:09:05.898469 4632 scope.go:117] "RemoveContainer" containerID="7aee414835c241037087b9dd85984dde222360ea86d76a71e1edb4faabf8ca02" Dec 01 07:09:05 crc kubenswrapper[4632]: E1201 07:09:05.898926 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7aee414835c241037087b9dd85984dde222360ea86d76a71e1edb4faabf8ca02\": container with ID starting with 7aee414835c241037087b9dd85984dde222360ea86d76a71e1edb4faabf8ca02 not found: ID does not exist" containerID="7aee414835c241037087b9dd85984dde222360ea86d76a71e1edb4faabf8ca02" Dec 01 07:09:05 crc kubenswrapper[4632]: I1201 07:09:05.898981 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7aee414835c241037087b9dd85984dde222360ea86d76a71e1edb4faabf8ca02"} err="failed to get container status \"7aee414835c241037087b9dd85984dde222360ea86d76a71e1edb4faabf8ca02\": rpc error: code = NotFound desc = could not find container \"7aee414835c241037087b9dd85984dde222360ea86d76a71e1edb4faabf8ca02\": container with ID starting with 7aee414835c241037087b9dd85984dde222360ea86d76a71e1edb4faabf8ca02 not found: ID does not exist" Dec 01 07:09:05 crc kubenswrapper[4632]: I1201 07:09:05.899013 4632 scope.go:117] "RemoveContainer" containerID="074c203c0888f61f9bcd5e5b2012e3a18d8a58a6439383f0cac0c93448b165d1" Dec 01 07:09:05 crc kubenswrapper[4632]: E1201 07:09:05.899407 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"074c203c0888f61f9bcd5e5b2012e3a18d8a58a6439383f0cac0c93448b165d1\": container with ID starting with 074c203c0888f61f9bcd5e5b2012e3a18d8a58a6439383f0cac0c93448b165d1 not found: ID does not exist" containerID="074c203c0888f61f9bcd5e5b2012e3a18d8a58a6439383f0cac0c93448b165d1" Dec 01 07:09:05 crc kubenswrapper[4632]: I1201 07:09:05.899444 4632 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"074c203c0888f61f9bcd5e5b2012e3a18d8a58a6439383f0cac0c93448b165d1"} err="failed to get container status \"074c203c0888f61f9bcd5e5b2012e3a18d8a58a6439383f0cac0c93448b165d1\": rpc error: code = NotFound desc = could not find container \"074c203c0888f61f9bcd5e5b2012e3a18d8a58a6439383f0cac0c93448b165d1\": container with ID starting with 074c203c0888f61f9bcd5e5b2012e3a18d8a58a6439383f0cac0c93448b165d1 not found: ID does not exist" Dec 01 07:09:05 crc kubenswrapper[4632]: I1201 07:09:05.899474 4632 scope.go:117] "RemoveContainer" containerID="13822b4377f095ce48e333e62a4a54914ba1cd9f9cafc80e7392cef9855573b5" Dec 01 07:09:05 crc kubenswrapper[4632]: E1201 07:09:05.899825 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13822b4377f095ce48e333e62a4a54914ba1cd9f9cafc80e7392cef9855573b5\": container with ID starting with 13822b4377f095ce48e333e62a4a54914ba1cd9f9cafc80e7392cef9855573b5 not found: ID does not exist" containerID="13822b4377f095ce48e333e62a4a54914ba1cd9f9cafc80e7392cef9855573b5" Dec 01 07:09:05 crc kubenswrapper[4632]: I1201 07:09:05.899854 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13822b4377f095ce48e333e62a4a54914ba1cd9f9cafc80e7392cef9855573b5"} err="failed to get container status \"13822b4377f095ce48e333e62a4a54914ba1cd9f9cafc80e7392cef9855573b5\": rpc error: code = NotFound desc = could not find container \"13822b4377f095ce48e333e62a4a54914ba1cd9f9cafc80e7392cef9855573b5\": container with ID starting with 13822b4377f095ce48e333e62a4a54914ba1cd9f9cafc80e7392cef9855573b5 not found: ID does not exist" Dec 01 07:09:06 crc kubenswrapper[4632]: I1201 07:09:06.760308 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c9b8d4b-67dc-42e2-b3a9-ac647c455d26" path="/var/lib/kubelet/pods/6c9b8d4b-67dc-42e2-b3a9-ac647c455d26/volumes" Dec 01 07:09:13 crc kubenswrapper[4632]: I1201 07:09:13.030873 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-749r7"] Dec 01 07:09:13 crc kubenswrapper[4632]: I1201 07:09:13.036186 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-749r7"] Dec 01 07:09:14 crc kubenswrapper[4632]: I1201 07:09:14.750176 4632 scope.go:117] "RemoveContainer" containerID="b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d" Dec 01 07:09:14 crc kubenswrapper[4632]: E1201 07:09:14.750994 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:09:14 crc kubenswrapper[4632]: I1201 07:09:14.761221 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="350c6392-bd06-44ec-98a2-edd392d66bbf" path="/var/lib/kubelet/pods/350c6392-bd06-44ec-98a2-edd392d66bbf/volumes" Dec 01 07:09:16 crc kubenswrapper[4632]: I1201 07:09:16.543654 4632 scope.go:117] "RemoveContainer" containerID="5217b31c470a73a8dc7553d7244bbee4649cf93862343b9d4e3ba4aa6a4f45f5" Dec 01 07:09:16 crc kubenswrapper[4632]: I1201 07:09:16.568154 4632 scope.go:117] "RemoveContainer" 
containerID="72a98ac041315821beeacd8b6aed3148c7431cb17a040fd888d59cca93023c1b" Dec 01 07:09:16 crc kubenswrapper[4632]: I1201 07:09:16.603596 4632 scope.go:117] "RemoveContainer" containerID="0634876edc50405a26c444ff1361dd7f980f6903348fee93dcecbd3687dbf080" Dec 01 07:09:16 crc kubenswrapper[4632]: I1201 07:09:16.656589 4632 scope.go:117] "RemoveContainer" containerID="6c0b0b9a6d884c4991fe39e3e49bfae5d3aefa4b3f576850703f44fb9cad48c4" Dec 01 07:09:16 crc kubenswrapper[4632]: I1201 07:09:16.694561 4632 scope.go:117] "RemoveContainer" containerID="db63ecd323b208a59bae25a8381222ab52945ec5c6a583c001bcd6c7f2d37fde" Dec 01 07:09:16 crc kubenswrapper[4632]: I1201 07:09:16.713020 4632 scope.go:117] "RemoveContainer" containerID="c408ae4834ffcbb51601338cd6039690e9278fb564e1d9926a1d5c4150f2883b" Dec 01 07:09:16 crc kubenswrapper[4632]: I1201 07:09:16.739281 4632 scope.go:117] "RemoveContainer" containerID="3b2c01c075f3736e44a5a7538a150d0f10e59fd5da228d13e67c52ef1427ee7c" Dec 01 07:09:16 crc kubenswrapper[4632]: I1201 07:09:16.763395 4632 scope.go:117] "RemoveContainer" containerID="ca6343fb56d2fd7a8d303446038bc39577fcc8c4e652137e785a17f43f86ec49" Dec 01 07:09:25 crc kubenswrapper[4632]: I1201 07:09:25.268092 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hgbcj"] Dec 01 07:09:25 crc kubenswrapper[4632]: E1201 07:09:25.268854 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c9b8d4b-67dc-42e2-b3a9-ac647c455d26" containerName="registry-server" Dec 01 07:09:25 crc kubenswrapper[4632]: I1201 07:09:25.268868 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c9b8d4b-67dc-42e2-b3a9-ac647c455d26" containerName="registry-server" Dec 01 07:09:25 crc kubenswrapper[4632]: E1201 07:09:25.268886 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c9b8d4b-67dc-42e2-b3a9-ac647c455d26" containerName="extract-content" Dec 01 07:09:25 crc kubenswrapper[4632]: I1201 07:09:25.268892 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c9b8d4b-67dc-42e2-b3a9-ac647c455d26" containerName="extract-content" Dec 01 07:09:25 crc kubenswrapper[4632]: E1201 07:09:25.268932 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c9b8d4b-67dc-42e2-b3a9-ac647c455d26" containerName="extract-utilities" Dec 01 07:09:25 crc kubenswrapper[4632]: I1201 07:09:25.268940 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c9b8d4b-67dc-42e2-b3a9-ac647c455d26" containerName="extract-utilities" Dec 01 07:09:25 crc kubenswrapper[4632]: I1201 07:09:25.269157 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c9b8d4b-67dc-42e2-b3a9-ac647c455d26" containerName="registry-server" Dec 01 07:09:25 crc kubenswrapper[4632]: I1201 07:09:25.270730 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hgbcj" Dec 01 07:09:25 crc kubenswrapper[4632]: I1201 07:09:25.282133 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hgbcj"] Dec 01 07:09:25 crc kubenswrapper[4632]: I1201 07:09:25.315648 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ngtlq\" (UniqueName: \"kubernetes.io/projected/fc9b21c2-a566-455a-b4bd-17f0c214a630-kube-api-access-ngtlq\") pod \"redhat-operators-hgbcj\" (UID: \"fc9b21c2-a566-455a-b4bd-17f0c214a630\") " pod="openshift-marketplace/redhat-operators-hgbcj" Dec 01 07:09:25 crc kubenswrapper[4632]: I1201 07:09:25.315847 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc9b21c2-a566-455a-b4bd-17f0c214a630-catalog-content\") pod \"redhat-operators-hgbcj\" (UID: \"fc9b21c2-a566-455a-b4bd-17f0c214a630\") " pod="openshift-marketplace/redhat-operators-hgbcj" Dec 01 07:09:25 crc kubenswrapper[4632]: I1201 07:09:25.316160 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc9b21c2-a566-455a-b4bd-17f0c214a630-utilities\") pod \"redhat-operators-hgbcj\" (UID: \"fc9b21c2-a566-455a-b4bd-17f0c214a630\") " pod="openshift-marketplace/redhat-operators-hgbcj" Dec 01 07:09:25 crc kubenswrapper[4632]: I1201 07:09:25.417926 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc9b21c2-a566-455a-b4bd-17f0c214a630-utilities\") pod \"redhat-operators-hgbcj\" (UID: \"fc9b21c2-a566-455a-b4bd-17f0c214a630\") " pod="openshift-marketplace/redhat-operators-hgbcj" Dec 01 07:09:25 crc kubenswrapper[4632]: I1201 07:09:25.418080 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ngtlq\" (UniqueName: \"kubernetes.io/projected/fc9b21c2-a566-455a-b4bd-17f0c214a630-kube-api-access-ngtlq\") pod \"redhat-operators-hgbcj\" (UID: \"fc9b21c2-a566-455a-b4bd-17f0c214a630\") " pod="openshift-marketplace/redhat-operators-hgbcj" Dec 01 07:09:25 crc kubenswrapper[4632]: I1201 07:09:25.418163 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc9b21c2-a566-455a-b4bd-17f0c214a630-catalog-content\") pod \"redhat-operators-hgbcj\" (UID: \"fc9b21c2-a566-455a-b4bd-17f0c214a630\") " pod="openshift-marketplace/redhat-operators-hgbcj" Dec 01 07:09:25 crc kubenswrapper[4632]: I1201 07:09:25.418524 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc9b21c2-a566-455a-b4bd-17f0c214a630-utilities\") pod \"redhat-operators-hgbcj\" (UID: \"fc9b21c2-a566-455a-b4bd-17f0c214a630\") " pod="openshift-marketplace/redhat-operators-hgbcj" Dec 01 07:09:25 crc kubenswrapper[4632]: I1201 07:09:25.418648 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc9b21c2-a566-455a-b4bd-17f0c214a630-catalog-content\") pod \"redhat-operators-hgbcj\" (UID: \"fc9b21c2-a566-455a-b4bd-17f0c214a630\") " pod="openshift-marketplace/redhat-operators-hgbcj" Dec 01 07:09:25 crc kubenswrapper[4632]: I1201 07:09:25.442566 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-ngtlq\" (UniqueName: \"kubernetes.io/projected/fc9b21c2-a566-455a-b4bd-17f0c214a630-kube-api-access-ngtlq\") pod \"redhat-operators-hgbcj\" (UID: \"fc9b21c2-a566-455a-b4bd-17f0c214a630\") " pod="openshift-marketplace/redhat-operators-hgbcj" Dec 01 07:09:25 crc kubenswrapper[4632]: I1201 07:09:25.588695 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hgbcj" Dec 01 07:09:26 crc kubenswrapper[4632]: I1201 07:09:26.026837 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hgbcj"] Dec 01 07:09:27 crc kubenswrapper[4632]: I1201 07:09:27.025542 4632 generic.go:334] "Generic (PLEG): container finished" podID="fc9b21c2-a566-455a-b4bd-17f0c214a630" containerID="5706722ed802abb88c9a5d89f1c509809ed01b37d7ee19e08e3ce6dd3c391b27" exitCode=0 Dec 01 07:09:27 crc kubenswrapper[4632]: I1201 07:09:27.025599 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hgbcj" event={"ID":"fc9b21c2-a566-455a-b4bd-17f0c214a630","Type":"ContainerDied","Data":"5706722ed802abb88c9a5d89f1c509809ed01b37d7ee19e08e3ce6dd3c391b27"} Dec 01 07:09:27 crc kubenswrapper[4632]: I1201 07:09:27.025629 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hgbcj" event={"ID":"fc9b21c2-a566-455a-b4bd-17f0c214a630","Type":"ContainerStarted","Data":"4e4fa05d5f80af1fa892482903884a082a9b451b333ad6fac648e12572636247"} Dec 01 07:09:28 crc kubenswrapper[4632]: I1201 07:09:28.035731 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hgbcj" event={"ID":"fc9b21c2-a566-455a-b4bd-17f0c214a630","Type":"ContainerStarted","Data":"f3ca79e7bcc401d2222133fc799586bbe7ad481ba85ece1d6e175aae70826a55"} Dec 01 07:09:29 crc kubenswrapper[4632]: I1201 07:09:29.750828 4632 scope.go:117] "RemoveContainer" containerID="b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d" Dec 01 07:09:29 crc kubenswrapper[4632]: E1201 07:09:29.751585 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:09:30 crc kubenswrapper[4632]: I1201 07:09:30.054044 4632 generic.go:334] "Generic (PLEG): container finished" podID="fc9b21c2-a566-455a-b4bd-17f0c214a630" containerID="f3ca79e7bcc401d2222133fc799586bbe7ad481ba85ece1d6e175aae70826a55" exitCode=0 Dec 01 07:09:30 crc kubenswrapper[4632]: I1201 07:09:30.054091 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hgbcj" event={"ID":"fc9b21c2-a566-455a-b4bd-17f0c214a630","Type":"ContainerDied","Data":"f3ca79e7bcc401d2222133fc799586bbe7ad481ba85ece1d6e175aae70826a55"} Dec 01 07:09:31 crc kubenswrapper[4632]: I1201 07:09:31.066707 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hgbcj" event={"ID":"fc9b21c2-a566-455a-b4bd-17f0c214a630","Type":"ContainerStarted","Data":"42c2ac98b252f856beded92d6b266fd56bba4dd6ceeaab6223cc977deb932a32"} Dec 01 07:09:31 crc kubenswrapper[4632]: I1201 07:09:31.086340 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-hgbcj" podStartSLOduration=2.265635229 podStartE2EDuration="6.086323092s" podCreationTimestamp="2025-12-01 07:09:25 +0000 UTC" firstStartedPulling="2025-12-01 07:09:27.027429781 +0000 UTC m=+1576.592442755" lastFinishedPulling="2025-12-01 07:09:30.848117645 +0000 UTC m=+1580.413130618" observedRunningTime="2025-12-01 07:09:31.083976465 +0000 UTC m=+1580.648989438" watchObservedRunningTime="2025-12-01 07:09:31.086323092 +0000 UTC m=+1580.651336064" Dec 01 07:09:34 crc kubenswrapper[4632]: I1201 07:09:34.099537 4632 generic.go:334] "Generic (PLEG): container finished" podID="c30c10f4-9c67-4caf-8858-a2e74307ee33" containerID="5ca69d66d92f4753bd73fcf6219fbeec7d694987ee124b629a821dd79ab96146" exitCode=0 Dec 01 07:09:34 crc kubenswrapper[4632]: I1201 07:09:34.099627 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-bp886" event={"ID":"c30c10f4-9c67-4caf-8858-a2e74307ee33","Type":"ContainerDied","Data":"5ca69d66d92f4753bd73fcf6219fbeec7d694987ee124b629a821dd79ab96146"} Dec 01 07:09:35 crc kubenswrapper[4632]: I1201 07:09:35.464470 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-bp886" Dec 01 07:09:35 crc kubenswrapper[4632]: I1201 07:09:35.552521 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjm7w\" (UniqueName: \"kubernetes.io/projected/c30c10f4-9c67-4caf-8858-a2e74307ee33-kube-api-access-gjm7w\") pod \"c30c10f4-9c67-4caf-8858-a2e74307ee33\" (UID: \"c30c10f4-9c67-4caf-8858-a2e74307ee33\") " Dec 01 07:09:35 crc kubenswrapper[4632]: I1201 07:09:35.552634 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c30c10f4-9c67-4caf-8858-a2e74307ee33-inventory\") pod \"c30c10f4-9c67-4caf-8858-a2e74307ee33\" (UID: \"c30c10f4-9c67-4caf-8858-a2e74307ee33\") " Dec 01 07:09:35 crc kubenswrapper[4632]: I1201 07:09:35.552691 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c30c10f4-9c67-4caf-8858-a2e74307ee33-ssh-key\") pod \"c30c10f4-9c67-4caf-8858-a2e74307ee33\" (UID: \"c30c10f4-9c67-4caf-8858-a2e74307ee33\") " Dec 01 07:09:35 crc kubenswrapper[4632]: I1201 07:09:35.559509 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c30c10f4-9c67-4caf-8858-a2e74307ee33-kube-api-access-gjm7w" (OuterVolumeSpecName: "kube-api-access-gjm7w") pod "c30c10f4-9c67-4caf-8858-a2e74307ee33" (UID: "c30c10f4-9c67-4caf-8858-a2e74307ee33"). InnerVolumeSpecName "kube-api-access-gjm7w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:09:35 crc kubenswrapper[4632]: I1201 07:09:35.573711 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c30c10f4-9c67-4caf-8858-a2e74307ee33-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c30c10f4-9c67-4caf-8858-a2e74307ee33" (UID: "c30c10f4-9c67-4caf-8858-a2e74307ee33"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:09:35 crc kubenswrapper[4632]: I1201 07:09:35.575524 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c30c10f4-9c67-4caf-8858-a2e74307ee33-inventory" (OuterVolumeSpecName: "inventory") pod "c30c10f4-9c67-4caf-8858-a2e74307ee33" (UID: "c30c10f4-9c67-4caf-8858-a2e74307ee33"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:09:35 crc kubenswrapper[4632]: I1201 07:09:35.589800 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hgbcj" Dec 01 07:09:35 crc kubenswrapper[4632]: I1201 07:09:35.589860 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hgbcj" Dec 01 07:09:35 crc kubenswrapper[4632]: I1201 07:09:35.628323 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hgbcj" Dec 01 07:09:35 crc kubenswrapper[4632]: I1201 07:09:35.655607 4632 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c30c10f4-9c67-4caf-8858-a2e74307ee33-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 07:09:35 crc kubenswrapper[4632]: I1201 07:09:35.655663 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjm7w\" (UniqueName: \"kubernetes.io/projected/c30c10f4-9c67-4caf-8858-a2e74307ee33-kube-api-access-gjm7w\") on node \"crc\" DevicePath \"\"" Dec 01 07:09:35 crc kubenswrapper[4632]: I1201 07:09:35.655679 4632 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c30c10f4-9c67-4caf-8858-a2e74307ee33-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 07:09:36 crc kubenswrapper[4632]: I1201 07:09:36.124558 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-bp886" event={"ID":"c30c10f4-9c67-4caf-8858-a2e74307ee33","Type":"ContainerDied","Data":"2010f8c0bc09fdafe6e4dc6402654389ae103b607880ec8515642a7991858933"} Dec 01 07:09:36 crc kubenswrapper[4632]: I1201 07:09:36.124627 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2010f8c0bc09fdafe6e4dc6402654389ae103b607880ec8515642a7991858933" Dec 01 07:09:36 crc kubenswrapper[4632]: I1201 07:09:36.124571 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-bp886" Dec 01 07:09:36 crc kubenswrapper[4632]: I1201 07:09:36.171045 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hgbcj" Dec 01 07:09:36 crc kubenswrapper[4632]: I1201 07:09:36.193635 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-bqvjq"] Dec 01 07:09:36 crc kubenswrapper[4632]: E1201 07:09:36.194102 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c30c10f4-9c67-4caf-8858-a2e74307ee33" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 01 07:09:36 crc kubenswrapper[4632]: I1201 07:09:36.194122 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="c30c10f4-9c67-4caf-8858-a2e74307ee33" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 01 07:09:36 crc kubenswrapper[4632]: I1201 07:09:36.194293 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="c30c10f4-9c67-4caf-8858-a2e74307ee33" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 01 07:09:36 crc kubenswrapper[4632]: I1201 07:09:36.194912 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-bqvjq" Dec 01 07:09:36 crc kubenswrapper[4632]: I1201 07:09:36.196564 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 07:09:36 crc kubenswrapper[4632]: I1201 07:09:36.196699 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l6tzc" Dec 01 07:09:36 crc kubenswrapper[4632]: I1201 07:09:36.196995 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 07:09:36 crc kubenswrapper[4632]: I1201 07:09:36.197983 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 07:09:36 crc kubenswrapper[4632]: I1201 07:09:36.201715 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-bqvjq"] Dec 01 07:09:36 crc kubenswrapper[4632]: I1201 07:09:36.232466 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hgbcj"] Dec 01 07:09:36 crc kubenswrapper[4632]: I1201 07:09:36.267760 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/6e818cb4-7d89-4c61-8bd4-8b15b748ed38-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-bqvjq\" (UID: \"6e818cb4-7d89-4c61-8bd4-8b15b748ed38\") " pod="openstack/ssh-known-hosts-edpm-deployment-bqvjq" Dec 01 07:09:36 crc kubenswrapper[4632]: I1201 07:09:36.268170 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffkld\" (UniqueName: \"kubernetes.io/projected/6e818cb4-7d89-4c61-8bd4-8b15b748ed38-kube-api-access-ffkld\") pod \"ssh-known-hosts-edpm-deployment-bqvjq\" (UID: \"6e818cb4-7d89-4c61-8bd4-8b15b748ed38\") " pod="openstack/ssh-known-hosts-edpm-deployment-bqvjq" Dec 01 07:09:36 crc kubenswrapper[4632]: I1201 07:09:36.268261 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: 
\"kubernetes.io/secret/6e818cb4-7d89-4c61-8bd4-8b15b748ed38-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-bqvjq\" (UID: \"6e818cb4-7d89-4c61-8bd4-8b15b748ed38\") " pod="openstack/ssh-known-hosts-edpm-deployment-bqvjq" Dec 01 07:09:36 crc kubenswrapper[4632]: I1201 07:09:36.369460 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffkld\" (UniqueName: \"kubernetes.io/projected/6e818cb4-7d89-4c61-8bd4-8b15b748ed38-kube-api-access-ffkld\") pod \"ssh-known-hosts-edpm-deployment-bqvjq\" (UID: \"6e818cb4-7d89-4c61-8bd4-8b15b748ed38\") " pod="openstack/ssh-known-hosts-edpm-deployment-bqvjq" Dec 01 07:09:36 crc kubenswrapper[4632]: I1201 07:09:36.369530 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/6e818cb4-7d89-4c61-8bd4-8b15b748ed38-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-bqvjq\" (UID: \"6e818cb4-7d89-4c61-8bd4-8b15b748ed38\") " pod="openstack/ssh-known-hosts-edpm-deployment-bqvjq" Dec 01 07:09:36 crc kubenswrapper[4632]: I1201 07:09:36.369582 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/6e818cb4-7d89-4c61-8bd4-8b15b748ed38-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-bqvjq\" (UID: \"6e818cb4-7d89-4c61-8bd4-8b15b748ed38\") " pod="openstack/ssh-known-hosts-edpm-deployment-bqvjq" Dec 01 07:09:36 crc kubenswrapper[4632]: I1201 07:09:36.373730 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/6e818cb4-7d89-4c61-8bd4-8b15b748ed38-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-bqvjq\" (UID: \"6e818cb4-7d89-4c61-8bd4-8b15b748ed38\") " pod="openstack/ssh-known-hosts-edpm-deployment-bqvjq" Dec 01 07:09:36 crc kubenswrapper[4632]: I1201 07:09:36.375373 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/6e818cb4-7d89-4c61-8bd4-8b15b748ed38-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-bqvjq\" (UID: \"6e818cb4-7d89-4c61-8bd4-8b15b748ed38\") " pod="openstack/ssh-known-hosts-edpm-deployment-bqvjq" Dec 01 07:09:36 crc kubenswrapper[4632]: I1201 07:09:36.384272 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffkld\" (UniqueName: \"kubernetes.io/projected/6e818cb4-7d89-4c61-8bd4-8b15b748ed38-kube-api-access-ffkld\") pod \"ssh-known-hosts-edpm-deployment-bqvjq\" (UID: \"6e818cb4-7d89-4c61-8bd4-8b15b748ed38\") " pod="openstack/ssh-known-hosts-edpm-deployment-bqvjq" Dec 01 07:09:36 crc kubenswrapper[4632]: I1201 07:09:36.509230 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-bqvjq" Dec 01 07:09:36 crc kubenswrapper[4632]: I1201 07:09:36.950856 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-bqvjq"] Dec 01 07:09:37 crc kubenswrapper[4632]: I1201 07:09:37.055829 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-d2q4m"] Dec 01 07:09:37 crc kubenswrapper[4632]: I1201 07:09:37.065309 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-d2q4m"] Dec 01 07:09:37 crc kubenswrapper[4632]: I1201 07:09:37.132827 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-bqvjq" event={"ID":"6e818cb4-7d89-4c61-8bd4-8b15b748ed38","Type":"ContainerStarted","Data":"ec6d4a12a653768cdeb4025a337ba2b15d0baaa232990b454d213212f0e2f1b6"} Dec 01 07:09:38 crc kubenswrapper[4632]: I1201 07:09:38.032479 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-hdlnh"] Dec 01 07:09:38 crc kubenswrapper[4632]: I1201 07:09:38.039286 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-hdlnh"] Dec 01 07:09:38 crc kubenswrapper[4632]: I1201 07:09:38.143656 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-bqvjq" event={"ID":"6e818cb4-7d89-4c61-8bd4-8b15b748ed38","Type":"ContainerStarted","Data":"ad2ad534700ed31b6d6b99f1f3a4a393231db7cee1f30b69a99340d1f70e8f62"} Dec 01 07:09:38 crc kubenswrapper[4632]: I1201 07:09:38.144060 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hgbcj" podUID="fc9b21c2-a566-455a-b4bd-17f0c214a630" containerName="registry-server" containerID="cri-o://42c2ac98b252f856beded92d6b266fd56bba4dd6ceeaab6223cc977deb932a32" gracePeriod=2 Dec 01 07:09:38 crc kubenswrapper[4632]: I1201 07:09:38.160618 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-bqvjq" podStartSLOduration=1.416484296 podStartE2EDuration="2.160600599s" podCreationTimestamp="2025-12-01 07:09:36 +0000 UTC" firstStartedPulling="2025-12-01 07:09:36.959322513 +0000 UTC m=+1586.524335475" lastFinishedPulling="2025-12-01 07:09:37.703438805 +0000 UTC m=+1587.268451778" observedRunningTime="2025-12-01 07:09:38.15803509 +0000 UTC m=+1587.723048053" watchObservedRunningTime="2025-12-01 07:09:38.160600599 +0000 UTC m=+1587.725613562" Dec 01 07:09:38 crc kubenswrapper[4632]: I1201 07:09:38.506950 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hgbcj" Dec 01 07:09:38 crc kubenswrapper[4632]: I1201 07:09:38.619641 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc9b21c2-a566-455a-b4bd-17f0c214a630-utilities\") pod \"fc9b21c2-a566-455a-b4bd-17f0c214a630\" (UID: \"fc9b21c2-a566-455a-b4bd-17f0c214a630\") " Dec 01 07:09:38 crc kubenswrapper[4632]: I1201 07:09:38.619981 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngtlq\" (UniqueName: \"kubernetes.io/projected/fc9b21c2-a566-455a-b4bd-17f0c214a630-kube-api-access-ngtlq\") pod \"fc9b21c2-a566-455a-b4bd-17f0c214a630\" (UID: \"fc9b21c2-a566-455a-b4bd-17f0c214a630\") " Dec 01 07:09:38 crc kubenswrapper[4632]: I1201 07:09:38.620119 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc9b21c2-a566-455a-b4bd-17f0c214a630-catalog-content\") pod \"fc9b21c2-a566-455a-b4bd-17f0c214a630\" (UID: \"fc9b21c2-a566-455a-b4bd-17f0c214a630\") " Dec 01 07:09:38 crc kubenswrapper[4632]: I1201 07:09:38.620633 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc9b21c2-a566-455a-b4bd-17f0c214a630-utilities" (OuterVolumeSpecName: "utilities") pod "fc9b21c2-a566-455a-b4bd-17f0c214a630" (UID: "fc9b21c2-a566-455a-b4bd-17f0c214a630"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:09:38 crc kubenswrapper[4632]: I1201 07:09:38.621304 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fc9b21c2-a566-455a-b4bd-17f0c214a630-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:09:38 crc kubenswrapper[4632]: I1201 07:09:38.626522 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc9b21c2-a566-455a-b4bd-17f0c214a630-kube-api-access-ngtlq" (OuterVolumeSpecName: "kube-api-access-ngtlq") pod "fc9b21c2-a566-455a-b4bd-17f0c214a630" (UID: "fc9b21c2-a566-455a-b4bd-17f0c214a630"). InnerVolumeSpecName "kube-api-access-ngtlq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:09:38 crc kubenswrapper[4632]: I1201 07:09:38.705565 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc9b21c2-a566-455a-b4bd-17f0c214a630-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fc9b21c2-a566-455a-b4bd-17f0c214a630" (UID: "fc9b21c2-a566-455a-b4bd-17f0c214a630"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:09:38 crc kubenswrapper[4632]: I1201 07:09:38.723862 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngtlq\" (UniqueName: \"kubernetes.io/projected/fc9b21c2-a566-455a-b4bd-17f0c214a630-kube-api-access-ngtlq\") on node \"crc\" DevicePath \"\"" Dec 01 07:09:38 crc kubenswrapper[4632]: I1201 07:09:38.723900 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fc9b21c2-a566-455a-b4bd-17f0c214a630-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:09:38 crc kubenswrapper[4632]: I1201 07:09:38.761536 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80cde8b2-5981-4e87-a781-db78ead1e0e1" path="/var/lib/kubelet/pods/80cde8b2-5981-4e87-a781-db78ead1e0e1/volumes" Dec 01 07:09:38 crc kubenswrapper[4632]: I1201 07:09:38.762166 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85" path="/var/lib/kubelet/pods/a1ce8f92-e8d7-4a6f-8d9c-bca1c7061a85/volumes" Dec 01 07:09:39 crc kubenswrapper[4632]: I1201 07:09:39.153307 4632 generic.go:334] "Generic (PLEG): container finished" podID="fc9b21c2-a566-455a-b4bd-17f0c214a630" containerID="42c2ac98b252f856beded92d6b266fd56bba4dd6ceeaab6223cc977deb932a32" exitCode=0 Dec 01 07:09:39 crc kubenswrapper[4632]: I1201 07:09:39.153443 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hgbcj" Dec 01 07:09:39 crc kubenswrapper[4632]: I1201 07:09:39.153493 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hgbcj" event={"ID":"fc9b21c2-a566-455a-b4bd-17f0c214a630","Type":"ContainerDied","Data":"42c2ac98b252f856beded92d6b266fd56bba4dd6ceeaab6223cc977deb932a32"} Dec 01 07:09:39 crc kubenswrapper[4632]: I1201 07:09:39.153526 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hgbcj" event={"ID":"fc9b21c2-a566-455a-b4bd-17f0c214a630","Type":"ContainerDied","Data":"4e4fa05d5f80af1fa892482903884a082a9b451b333ad6fac648e12572636247"} Dec 01 07:09:39 crc kubenswrapper[4632]: I1201 07:09:39.153546 4632 scope.go:117] "RemoveContainer" containerID="42c2ac98b252f856beded92d6b266fd56bba4dd6ceeaab6223cc977deb932a32" Dec 01 07:09:39 crc kubenswrapper[4632]: I1201 07:09:39.176502 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hgbcj"] Dec 01 07:09:39 crc kubenswrapper[4632]: I1201 07:09:39.180550 4632 scope.go:117] "RemoveContainer" containerID="f3ca79e7bcc401d2222133fc799586bbe7ad481ba85ece1d6e175aae70826a55" Dec 01 07:09:39 crc kubenswrapper[4632]: I1201 07:09:39.184598 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hgbcj"] Dec 01 07:09:39 crc kubenswrapper[4632]: I1201 07:09:39.201486 4632 scope.go:117] "RemoveContainer" containerID="5706722ed802abb88c9a5d89f1c509809ed01b37d7ee19e08e3ce6dd3c391b27" Dec 01 07:09:39 crc kubenswrapper[4632]: I1201 07:09:39.236827 4632 scope.go:117] "RemoveContainer" containerID="42c2ac98b252f856beded92d6b266fd56bba4dd6ceeaab6223cc977deb932a32" Dec 01 07:09:39 crc kubenswrapper[4632]: E1201 07:09:39.237193 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42c2ac98b252f856beded92d6b266fd56bba4dd6ceeaab6223cc977deb932a32\": container with ID starting with 
42c2ac98b252f856beded92d6b266fd56bba4dd6ceeaab6223cc977deb932a32 not found: ID does not exist" containerID="42c2ac98b252f856beded92d6b266fd56bba4dd6ceeaab6223cc977deb932a32" Dec 01 07:09:39 crc kubenswrapper[4632]: I1201 07:09:39.237226 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42c2ac98b252f856beded92d6b266fd56bba4dd6ceeaab6223cc977deb932a32"} err="failed to get container status \"42c2ac98b252f856beded92d6b266fd56bba4dd6ceeaab6223cc977deb932a32\": rpc error: code = NotFound desc = could not find container \"42c2ac98b252f856beded92d6b266fd56bba4dd6ceeaab6223cc977deb932a32\": container with ID starting with 42c2ac98b252f856beded92d6b266fd56bba4dd6ceeaab6223cc977deb932a32 not found: ID does not exist" Dec 01 07:09:39 crc kubenswrapper[4632]: I1201 07:09:39.237250 4632 scope.go:117] "RemoveContainer" containerID="f3ca79e7bcc401d2222133fc799586bbe7ad481ba85ece1d6e175aae70826a55" Dec 01 07:09:39 crc kubenswrapper[4632]: E1201 07:09:39.237533 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f3ca79e7bcc401d2222133fc799586bbe7ad481ba85ece1d6e175aae70826a55\": container with ID starting with f3ca79e7bcc401d2222133fc799586bbe7ad481ba85ece1d6e175aae70826a55 not found: ID does not exist" containerID="f3ca79e7bcc401d2222133fc799586bbe7ad481ba85ece1d6e175aae70826a55" Dec 01 07:09:39 crc kubenswrapper[4632]: I1201 07:09:39.237582 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3ca79e7bcc401d2222133fc799586bbe7ad481ba85ece1d6e175aae70826a55"} err="failed to get container status \"f3ca79e7bcc401d2222133fc799586bbe7ad481ba85ece1d6e175aae70826a55\": rpc error: code = NotFound desc = could not find container \"f3ca79e7bcc401d2222133fc799586bbe7ad481ba85ece1d6e175aae70826a55\": container with ID starting with f3ca79e7bcc401d2222133fc799586bbe7ad481ba85ece1d6e175aae70826a55 not found: ID does not exist" Dec 01 07:09:39 crc kubenswrapper[4632]: I1201 07:09:39.237615 4632 scope.go:117] "RemoveContainer" containerID="5706722ed802abb88c9a5d89f1c509809ed01b37d7ee19e08e3ce6dd3c391b27" Dec 01 07:09:39 crc kubenswrapper[4632]: E1201 07:09:39.237857 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5706722ed802abb88c9a5d89f1c509809ed01b37d7ee19e08e3ce6dd3c391b27\": container with ID starting with 5706722ed802abb88c9a5d89f1c509809ed01b37d7ee19e08e3ce6dd3c391b27 not found: ID does not exist" containerID="5706722ed802abb88c9a5d89f1c509809ed01b37d7ee19e08e3ce6dd3c391b27" Dec 01 07:09:39 crc kubenswrapper[4632]: I1201 07:09:39.237882 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5706722ed802abb88c9a5d89f1c509809ed01b37d7ee19e08e3ce6dd3c391b27"} err="failed to get container status \"5706722ed802abb88c9a5d89f1c509809ed01b37d7ee19e08e3ce6dd3c391b27\": rpc error: code = NotFound desc = could not find container \"5706722ed802abb88c9a5d89f1c509809ed01b37d7ee19e08e3ce6dd3c391b27\": container with ID starting with 5706722ed802abb88c9a5d89f1c509809ed01b37d7ee19e08e3ce6dd3c391b27 not found: ID does not exist" Dec 01 07:09:40 crc kubenswrapper[4632]: I1201 07:09:40.774189 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc9b21c2-a566-455a-b4bd-17f0c214a630" path="/var/lib/kubelet/pods/fc9b21c2-a566-455a-b4bd-17f0c214a630/volumes" Dec 01 07:09:43 crc kubenswrapper[4632]: I1201 07:09:43.193044 
4632 generic.go:334] "Generic (PLEG): container finished" podID="6e818cb4-7d89-4c61-8bd4-8b15b748ed38" containerID="ad2ad534700ed31b6d6b99f1f3a4a393231db7cee1f30b69a99340d1f70e8f62" exitCode=0 Dec 01 07:09:43 crc kubenswrapper[4632]: I1201 07:09:43.193199 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-bqvjq" event={"ID":"6e818cb4-7d89-4c61-8bd4-8b15b748ed38","Type":"ContainerDied","Data":"ad2ad534700ed31b6d6b99f1f3a4a393231db7cee1f30b69a99340d1f70e8f62"} Dec 01 07:09:43 crc kubenswrapper[4632]: I1201 07:09:43.750072 4632 scope.go:117] "RemoveContainer" containerID="b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d" Dec 01 07:09:43 crc kubenswrapper[4632]: E1201 07:09:43.750289 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:09:44 crc kubenswrapper[4632]: I1201 07:09:44.531872 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-bqvjq" Dec 01 07:09:44 crc kubenswrapper[4632]: I1201 07:09:44.540887 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/6e818cb4-7d89-4c61-8bd4-8b15b748ed38-inventory-0\") pod \"6e818cb4-7d89-4c61-8bd4-8b15b748ed38\" (UID: \"6e818cb4-7d89-4c61-8bd4-8b15b748ed38\") " Dec 01 07:09:44 crc kubenswrapper[4632]: I1201 07:09:44.540949 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ffkld\" (UniqueName: \"kubernetes.io/projected/6e818cb4-7d89-4c61-8bd4-8b15b748ed38-kube-api-access-ffkld\") pod \"6e818cb4-7d89-4c61-8bd4-8b15b748ed38\" (UID: \"6e818cb4-7d89-4c61-8bd4-8b15b748ed38\") " Dec 01 07:09:44 crc kubenswrapper[4632]: I1201 07:09:44.541075 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/6e818cb4-7d89-4c61-8bd4-8b15b748ed38-ssh-key-openstack-edpm-ipam\") pod \"6e818cb4-7d89-4c61-8bd4-8b15b748ed38\" (UID: \"6e818cb4-7d89-4c61-8bd4-8b15b748ed38\") " Dec 01 07:09:44 crc kubenswrapper[4632]: I1201 07:09:44.549558 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e818cb4-7d89-4c61-8bd4-8b15b748ed38-kube-api-access-ffkld" (OuterVolumeSpecName: "kube-api-access-ffkld") pod "6e818cb4-7d89-4c61-8bd4-8b15b748ed38" (UID: "6e818cb4-7d89-4c61-8bd4-8b15b748ed38"). InnerVolumeSpecName "kube-api-access-ffkld". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:09:44 crc kubenswrapper[4632]: I1201 07:09:44.565234 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e818cb4-7d89-4c61-8bd4-8b15b748ed38-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "6e818cb4-7d89-4c61-8bd4-8b15b748ed38" (UID: "6e818cb4-7d89-4c61-8bd4-8b15b748ed38"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:09:44 crc kubenswrapper[4632]: I1201 07:09:44.568607 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e818cb4-7d89-4c61-8bd4-8b15b748ed38-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "6e818cb4-7d89-4c61-8bd4-8b15b748ed38" (UID: "6e818cb4-7d89-4c61-8bd4-8b15b748ed38"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:09:44 crc kubenswrapper[4632]: I1201 07:09:44.643487 4632 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/6e818cb4-7d89-4c61-8bd4-8b15b748ed38-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 01 07:09:44 crc kubenswrapper[4632]: I1201 07:09:44.643522 4632 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/6e818cb4-7d89-4c61-8bd4-8b15b748ed38-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 01 07:09:44 crc kubenswrapper[4632]: I1201 07:09:44.643532 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ffkld\" (UniqueName: \"kubernetes.io/projected/6e818cb4-7d89-4c61-8bd4-8b15b748ed38-kube-api-access-ffkld\") on node \"crc\" DevicePath \"\"" Dec 01 07:09:45 crc kubenswrapper[4632]: I1201 07:09:45.211080 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-bqvjq" event={"ID":"6e818cb4-7d89-4c61-8bd4-8b15b748ed38","Type":"ContainerDied","Data":"ec6d4a12a653768cdeb4025a337ba2b15d0baaa232990b454d213212f0e2f1b6"} Dec 01 07:09:45 crc kubenswrapper[4632]: I1201 07:09:45.211408 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec6d4a12a653768cdeb4025a337ba2b15d0baaa232990b454d213212f0e2f1b6" Dec 01 07:09:45 crc kubenswrapper[4632]: I1201 07:09:45.211169 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-bqvjq" Dec 01 07:09:45 crc kubenswrapper[4632]: I1201 07:09:45.269478 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-n5wcr"] Dec 01 07:09:45 crc kubenswrapper[4632]: E1201 07:09:45.269854 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e818cb4-7d89-4c61-8bd4-8b15b748ed38" containerName="ssh-known-hosts-edpm-deployment" Dec 01 07:09:45 crc kubenswrapper[4632]: I1201 07:09:45.269875 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e818cb4-7d89-4c61-8bd4-8b15b748ed38" containerName="ssh-known-hosts-edpm-deployment" Dec 01 07:09:45 crc kubenswrapper[4632]: E1201 07:09:45.269899 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc9b21c2-a566-455a-b4bd-17f0c214a630" containerName="registry-server" Dec 01 07:09:45 crc kubenswrapper[4632]: I1201 07:09:45.269906 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc9b21c2-a566-455a-b4bd-17f0c214a630" containerName="registry-server" Dec 01 07:09:45 crc kubenswrapper[4632]: E1201 07:09:45.269952 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc9b21c2-a566-455a-b4bd-17f0c214a630" containerName="extract-utilities" Dec 01 07:09:45 crc kubenswrapper[4632]: I1201 07:09:45.269960 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc9b21c2-a566-455a-b4bd-17f0c214a630" containerName="extract-utilities" Dec 01 07:09:45 crc kubenswrapper[4632]: E1201 07:09:45.269975 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc9b21c2-a566-455a-b4bd-17f0c214a630" containerName="extract-content" Dec 01 07:09:45 crc kubenswrapper[4632]: I1201 07:09:45.269980 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc9b21c2-a566-455a-b4bd-17f0c214a630" containerName="extract-content" Dec 01 07:09:45 crc kubenswrapper[4632]: I1201 07:09:45.270382 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e818cb4-7d89-4c61-8bd4-8b15b748ed38" containerName="ssh-known-hosts-edpm-deployment" Dec 01 07:09:45 crc kubenswrapper[4632]: I1201 07:09:45.270407 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc9b21c2-a566-455a-b4bd-17f0c214a630" containerName="registry-server" Dec 01 07:09:45 crc kubenswrapper[4632]: I1201 07:09:45.270948 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n5wcr" Dec 01 07:09:45 crc kubenswrapper[4632]: I1201 07:09:45.273812 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 07:09:45 crc kubenswrapper[4632]: I1201 07:09:45.274250 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l6tzc" Dec 01 07:09:45 crc kubenswrapper[4632]: I1201 07:09:45.274579 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 07:09:45 crc kubenswrapper[4632]: I1201 07:09:45.276620 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 07:09:45 crc kubenswrapper[4632]: I1201 07:09:45.289165 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-n5wcr"] Dec 01 07:09:45 crc kubenswrapper[4632]: I1201 07:09:45.458897 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98gk6\" (UniqueName: \"kubernetes.io/projected/3debc8c5-6f78-44c9-9f2d-4207eeec3b11-kube-api-access-98gk6\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-n5wcr\" (UID: \"3debc8c5-6f78-44c9-9f2d-4207eeec3b11\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n5wcr" Dec 01 07:09:45 crc kubenswrapper[4632]: I1201 07:09:45.458938 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3debc8c5-6f78-44c9-9f2d-4207eeec3b11-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-n5wcr\" (UID: \"3debc8c5-6f78-44c9-9f2d-4207eeec3b11\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n5wcr" Dec 01 07:09:45 crc kubenswrapper[4632]: I1201 07:09:45.458974 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3debc8c5-6f78-44c9-9f2d-4207eeec3b11-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-n5wcr\" (UID: \"3debc8c5-6f78-44c9-9f2d-4207eeec3b11\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n5wcr" Dec 01 07:09:45 crc kubenswrapper[4632]: I1201 07:09:45.560367 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98gk6\" (UniqueName: \"kubernetes.io/projected/3debc8c5-6f78-44c9-9f2d-4207eeec3b11-kube-api-access-98gk6\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-n5wcr\" (UID: \"3debc8c5-6f78-44c9-9f2d-4207eeec3b11\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n5wcr" Dec 01 07:09:45 crc kubenswrapper[4632]: I1201 07:09:45.560411 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3debc8c5-6f78-44c9-9f2d-4207eeec3b11-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-n5wcr\" (UID: \"3debc8c5-6f78-44c9-9f2d-4207eeec3b11\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n5wcr" Dec 01 07:09:45 crc kubenswrapper[4632]: I1201 07:09:45.560445 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3debc8c5-6f78-44c9-9f2d-4207eeec3b11-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-n5wcr\" (UID: \"3debc8c5-6f78-44c9-9f2d-4207eeec3b11\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n5wcr" Dec 01 07:09:45 crc kubenswrapper[4632]: I1201 07:09:45.565289 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3debc8c5-6f78-44c9-9f2d-4207eeec3b11-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-n5wcr\" (UID: \"3debc8c5-6f78-44c9-9f2d-4207eeec3b11\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n5wcr" Dec 01 07:09:45 crc kubenswrapper[4632]: I1201 07:09:45.565694 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3debc8c5-6f78-44c9-9f2d-4207eeec3b11-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-n5wcr\" (UID: \"3debc8c5-6f78-44c9-9f2d-4207eeec3b11\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n5wcr" Dec 01 07:09:45 crc kubenswrapper[4632]: I1201 07:09:45.575065 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98gk6\" (UniqueName: \"kubernetes.io/projected/3debc8c5-6f78-44c9-9f2d-4207eeec3b11-kube-api-access-98gk6\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-n5wcr\" (UID: \"3debc8c5-6f78-44c9-9f2d-4207eeec3b11\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n5wcr" Dec 01 07:09:45 crc kubenswrapper[4632]: I1201 07:09:45.585540 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n5wcr" Dec 01 07:09:46 crc kubenswrapper[4632]: I1201 07:09:46.028932 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-n5wcr"] Dec 01 07:09:46 crc kubenswrapper[4632]: I1201 07:09:46.220272 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n5wcr" event={"ID":"3debc8c5-6f78-44c9-9f2d-4207eeec3b11","Type":"ContainerStarted","Data":"2fe24244aaff9fa4111c649810211aac83664421cc04c8098e9ed39a657540db"} Dec 01 07:09:47 crc kubenswrapper[4632]: I1201 07:09:47.230870 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n5wcr" event={"ID":"3debc8c5-6f78-44c9-9f2d-4207eeec3b11","Type":"ContainerStarted","Data":"92760f6cbf0345defde90df592743429c424573b04fb6bf08eaf87b5ad545b95"} Dec 01 07:09:47 crc kubenswrapper[4632]: I1201 07:09:47.245242 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n5wcr" podStartSLOduration=1.751998921 podStartE2EDuration="2.245212097s" podCreationTimestamp="2025-12-01 07:09:45 +0000 UTC" firstStartedPulling="2025-12-01 07:09:46.030560474 +0000 UTC m=+1595.595573457" lastFinishedPulling="2025-12-01 07:09:46.52377366 +0000 UTC m=+1596.088786633" observedRunningTime="2025-12-01 07:09:47.244966444 +0000 UTC m=+1596.809979417" watchObservedRunningTime="2025-12-01 07:09:47.245212097 +0000 UTC m=+1596.810225071" Dec 01 07:09:53 crc kubenswrapper[4632]: I1201 07:09:53.290050 4632 generic.go:334] "Generic (PLEG): container finished" podID="3debc8c5-6f78-44c9-9f2d-4207eeec3b11" containerID="92760f6cbf0345defde90df592743429c424573b04fb6bf08eaf87b5ad545b95" exitCode=0 Dec 01 07:09:53 crc kubenswrapper[4632]: I1201 07:09:53.290122 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n5wcr" 
event={"ID":"3debc8c5-6f78-44c9-9f2d-4207eeec3b11","Type":"ContainerDied","Data":"92760f6cbf0345defde90df592743429c424573b04fb6bf08eaf87b5ad545b95"} Dec 01 07:09:54 crc kubenswrapper[4632]: I1201 07:09:54.601502 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n5wcr" Dec 01 07:09:54 crc kubenswrapper[4632]: I1201 07:09:54.629604 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98gk6\" (UniqueName: \"kubernetes.io/projected/3debc8c5-6f78-44c9-9f2d-4207eeec3b11-kube-api-access-98gk6\") pod \"3debc8c5-6f78-44c9-9f2d-4207eeec3b11\" (UID: \"3debc8c5-6f78-44c9-9f2d-4207eeec3b11\") " Dec 01 07:09:54 crc kubenswrapper[4632]: I1201 07:09:54.629868 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3debc8c5-6f78-44c9-9f2d-4207eeec3b11-inventory\") pod \"3debc8c5-6f78-44c9-9f2d-4207eeec3b11\" (UID: \"3debc8c5-6f78-44c9-9f2d-4207eeec3b11\") " Dec 01 07:09:54 crc kubenswrapper[4632]: I1201 07:09:54.629908 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3debc8c5-6f78-44c9-9f2d-4207eeec3b11-ssh-key\") pod \"3debc8c5-6f78-44c9-9f2d-4207eeec3b11\" (UID: \"3debc8c5-6f78-44c9-9f2d-4207eeec3b11\") " Dec 01 07:09:54 crc kubenswrapper[4632]: I1201 07:09:54.635095 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3debc8c5-6f78-44c9-9f2d-4207eeec3b11-kube-api-access-98gk6" (OuterVolumeSpecName: "kube-api-access-98gk6") pod "3debc8c5-6f78-44c9-9f2d-4207eeec3b11" (UID: "3debc8c5-6f78-44c9-9f2d-4207eeec3b11"). InnerVolumeSpecName "kube-api-access-98gk6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:09:54 crc kubenswrapper[4632]: I1201 07:09:54.652388 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3debc8c5-6f78-44c9-9f2d-4207eeec3b11-inventory" (OuterVolumeSpecName: "inventory") pod "3debc8c5-6f78-44c9-9f2d-4207eeec3b11" (UID: "3debc8c5-6f78-44c9-9f2d-4207eeec3b11"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:09:54 crc kubenswrapper[4632]: I1201 07:09:54.652492 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3debc8c5-6f78-44c9-9f2d-4207eeec3b11-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3debc8c5-6f78-44c9-9f2d-4207eeec3b11" (UID: "3debc8c5-6f78-44c9-9f2d-4207eeec3b11"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:09:54 crc kubenswrapper[4632]: I1201 07:09:54.732468 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98gk6\" (UniqueName: \"kubernetes.io/projected/3debc8c5-6f78-44c9-9f2d-4207eeec3b11-kube-api-access-98gk6\") on node \"crc\" DevicePath \"\"" Dec 01 07:09:54 crc kubenswrapper[4632]: I1201 07:09:54.732528 4632 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3debc8c5-6f78-44c9-9f2d-4207eeec3b11-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 07:09:54 crc kubenswrapper[4632]: I1201 07:09:54.732557 4632 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3debc8c5-6f78-44c9-9f2d-4207eeec3b11-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 07:09:54 crc kubenswrapper[4632]: I1201 07:09:54.750852 4632 scope.go:117] "RemoveContainer" containerID="b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d" Dec 01 07:09:54 crc kubenswrapper[4632]: E1201 07:09:54.751346 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:09:55 crc kubenswrapper[4632]: I1201 07:09:55.307184 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n5wcr" event={"ID":"3debc8c5-6f78-44c9-9f2d-4207eeec3b11","Type":"ContainerDied","Data":"2fe24244aaff9fa4111c649810211aac83664421cc04c8098e9ed39a657540db"} Dec 01 07:09:55 crc kubenswrapper[4632]: I1201 07:09:55.307245 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2fe24244aaff9fa4111c649810211aac83664421cc04c8098e9ed39a657540db" Dec 01 07:09:55 crc kubenswrapper[4632]: I1201 07:09:55.307293 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-n5wcr" Dec 01 07:09:55 crc kubenswrapper[4632]: I1201 07:09:55.363048 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx"] Dec 01 07:09:55 crc kubenswrapper[4632]: E1201 07:09:55.364155 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3debc8c5-6f78-44c9-9f2d-4207eeec3b11" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 01 07:09:55 crc kubenswrapper[4632]: I1201 07:09:55.364191 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="3debc8c5-6f78-44c9-9f2d-4207eeec3b11" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 01 07:09:55 crc kubenswrapper[4632]: I1201 07:09:55.364578 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="3debc8c5-6f78-44c9-9f2d-4207eeec3b11" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 01 07:09:55 crc kubenswrapper[4632]: I1201 07:09:55.365763 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx" Dec 01 07:09:55 crc kubenswrapper[4632]: I1201 07:09:55.368000 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 07:09:55 crc kubenswrapper[4632]: I1201 07:09:55.368526 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 07:09:55 crc kubenswrapper[4632]: I1201 07:09:55.370444 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l6tzc" Dec 01 07:09:55 crc kubenswrapper[4632]: I1201 07:09:55.371372 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx"] Dec 01 07:09:55 crc kubenswrapper[4632]: I1201 07:09:55.373884 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 07:09:55 crc kubenswrapper[4632]: I1201 07:09:55.446129 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c9a462bc-acd6-4d48-b78b-3584fdb57851-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx\" (UID: \"c9a462bc-acd6-4d48-b78b-3584fdb57851\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx" Dec 01 07:09:55 crc kubenswrapper[4632]: I1201 07:09:55.446300 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c9a462bc-acd6-4d48-b78b-3584fdb57851-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx\" (UID: \"c9a462bc-acd6-4d48-b78b-3584fdb57851\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx" Dec 01 07:09:55 crc kubenswrapper[4632]: I1201 07:09:55.446901 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfhnd\" (UniqueName: \"kubernetes.io/projected/c9a462bc-acd6-4d48-b78b-3584fdb57851-kube-api-access-cfhnd\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx\" (UID: \"c9a462bc-acd6-4d48-b78b-3584fdb57851\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx" Dec 01 07:09:55 crc kubenswrapper[4632]: I1201 07:09:55.548145 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c9a462bc-acd6-4d48-b78b-3584fdb57851-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx\" (UID: \"c9a462bc-acd6-4d48-b78b-3584fdb57851\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx" Dec 01 07:09:55 crc kubenswrapper[4632]: I1201 07:09:55.548245 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c9a462bc-acd6-4d48-b78b-3584fdb57851-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx\" (UID: \"c9a462bc-acd6-4d48-b78b-3584fdb57851\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx" Dec 01 07:09:55 crc kubenswrapper[4632]: I1201 07:09:55.548330 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfhnd\" (UniqueName: \"kubernetes.io/projected/c9a462bc-acd6-4d48-b78b-3584fdb57851-kube-api-access-cfhnd\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx\" (UID: 
\"c9a462bc-acd6-4d48-b78b-3584fdb57851\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx" Dec 01 07:09:55 crc kubenswrapper[4632]: I1201 07:09:55.553613 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c9a462bc-acd6-4d48-b78b-3584fdb57851-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx\" (UID: \"c9a462bc-acd6-4d48-b78b-3584fdb57851\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx" Dec 01 07:09:55 crc kubenswrapper[4632]: I1201 07:09:55.554977 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c9a462bc-acd6-4d48-b78b-3584fdb57851-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx\" (UID: \"c9a462bc-acd6-4d48-b78b-3584fdb57851\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx" Dec 01 07:09:55 crc kubenswrapper[4632]: I1201 07:09:55.562940 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfhnd\" (UniqueName: \"kubernetes.io/projected/c9a462bc-acd6-4d48-b78b-3584fdb57851-kube-api-access-cfhnd\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx\" (UID: \"c9a462bc-acd6-4d48-b78b-3584fdb57851\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx" Dec 01 07:09:55 crc kubenswrapper[4632]: I1201 07:09:55.683493 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx" Dec 01 07:09:56 crc kubenswrapper[4632]: I1201 07:09:56.139462 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx"] Dec 01 07:09:56 crc kubenswrapper[4632]: I1201 07:09:56.315136 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx" event={"ID":"c9a462bc-acd6-4d48-b78b-3584fdb57851","Type":"ContainerStarted","Data":"608daa654cbcbdc1078b97ab689013e3edfc846f9be0fabbec8257e91942a253"} Dec 01 07:09:57 crc kubenswrapper[4632]: I1201 07:09:57.348704 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx" event={"ID":"c9a462bc-acd6-4d48-b78b-3584fdb57851","Type":"ContainerStarted","Data":"1d9d76d61c47c4bc0d15fab5d4543c0452ea9bd95e082b9bca76b9f55df3f095"} Dec 01 07:09:57 crc kubenswrapper[4632]: I1201 07:09:57.370166 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx" podStartSLOduration=1.812786896 podStartE2EDuration="2.370155061s" podCreationTimestamp="2025-12-01 07:09:55 +0000 UTC" firstStartedPulling="2025-12-01 07:09:56.142251182 +0000 UTC m=+1605.707264155" lastFinishedPulling="2025-12-01 07:09:56.699619347 +0000 UTC m=+1606.264632320" observedRunningTime="2025-12-01 07:09:57.368052094 +0000 UTC m=+1606.933065067" watchObservedRunningTime="2025-12-01 07:09:57.370155061 +0000 UTC m=+1606.935168034" Dec 01 07:10:04 crc kubenswrapper[4632]: I1201 07:10:04.408442 4632 generic.go:334] "Generic (PLEG): container finished" podID="c9a462bc-acd6-4d48-b78b-3584fdb57851" containerID="1d9d76d61c47c4bc0d15fab5d4543c0452ea9bd95e082b9bca76b9f55df3f095" exitCode=0 Dec 01 07:10:04 crc kubenswrapper[4632]: I1201 07:10:04.408523 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx" 
event={"ID":"c9a462bc-acd6-4d48-b78b-3584fdb57851","Type":"ContainerDied","Data":"1d9d76d61c47c4bc0d15fab5d4543c0452ea9bd95e082b9bca76b9f55df3f095"} Dec 01 07:10:05 crc kubenswrapper[4632]: I1201 07:10:05.748235 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx" Dec 01 07:10:05 crc kubenswrapper[4632]: I1201 07:10:05.865603 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfhnd\" (UniqueName: \"kubernetes.io/projected/c9a462bc-acd6-4d48-b78b-3584fdb57851-kube-api-access-cfhnd\") pod \"c9a462bc-acd6-4d48-b78b-3584fdb57851\" (UID: \"c9a462bc-acd6-4d48-b78b-3584fdb57851\") " Dec 01 07:10:05 crc kubenswrapper[4632]: I1201 07:10:05.865674 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c9a462bc-acd6-4d48-b78b-3584fdb57851-inventory\") pod \"c9a462bc-acd6-4d48-b78b-3584fdb57851\" (UID: \"c9a462bc-acd6-4d48-b78b-3584fdb57851\") " Dec 01 07:10:05 crc kubenswrapper[4632]: I1201 07:10:05.865995 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c9a462bc-acd6-4d48-b78b-3584fdb57851-ssh-key\") pod \"c9a462bc-acd6-4d48-b78b-3584fdb57851\" (UID: \"c9a462bc-acd6-4d48-b78b-3584fdb57851\") " Dec 01 07:10:05 crc kubenswrapper[4632]: I1201 07:10:05.871574 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9a462bc-acd6-4d48-b78b-3584fdb57851-kube-api-access-cfhnd" (OuterVolumeSpecName: "kube-api-access-cfhnd") pod "c9a462bc-acd6-4d48-b78b-3584fdb57851" (UID: "c9a462bc-acd6-4d48-b78b-3584fdb57851"). InnerVolumeSpecName "kube-api-access-cfhnd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:10:05 crc kubenswrapper[4632]: I1201 07:10:05.888765 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9a462bc-acd6-4d48-b78b-3584fdb57851-inventory" (OuterVolumeSpecName: "inventory") pod "c9a462bc-acd6-4d48-b78b-3584fdb57851" (UID: "c9a462bc-acd6-4d48-b78b-3584fdb57851"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:10:05 crc kubenswrapper[4632]: I1201 07:10:05.889637 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9a462bc-acd6-4d48-b78b-3584fdb57851-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c9a462bc-acd6-4d48-b78b-3584fdb57851" (UID: "c9a462bc-acd6-4d48-b78b-3584fdb57851"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:10:05 crc kubenswrapper[4632]: I1201 07:10:05.968044 4632 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c9a462bc-acd6-4d48-b78b-3584fdb57851-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:05 crc kubenswrapper[4632]: I1201 07:10:05.968341 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfhnd\" (UniqueName: \"kubernetes.io/projected/c9a462bc-acd6-4d48-b78b-3584fdb57851-kube-api-access-cfhnd\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:05 crc kubenswrapper[4632]: I1201 07:10:05.968371 4632 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c9a462bc-acd6-4d48-b78b-3584fdb57851-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.424228 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx" event={"ID":"c9a462bc-acd6-4d48-b78b-3584fdb57851","Type":"ContainerDied","Data":"608daa654cbcbdc1078b97ab689013e3edfc846f9be0fabbec8257e91942a253"} Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.424274 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="608daa654cbcbdc1078b97ab689013e3edfc846f9be0fabbec8257e91942a253" Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.424298 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx" Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.515721 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"] Dec 01 07:10:06 crc kubenswrapper[4632]: E1201 07:10:06.516178 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9a462bc-acd6-4d48-b78b-3584fdb57851" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.516195 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9a462bc-acd6-4d48-b78b-3584fdb57851" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.517623 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9a462bc-acd6-4d48-b78b-3584fdb57851" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.518583 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.521272 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l6tzc"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.521463 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.521675 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.521819 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.521895 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.521938 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.522510 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.531120 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.533637 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"]
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.577894 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.577938 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.578070 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.578125 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.578309 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.578382 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.578525 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.578555 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.578595 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.578626 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgg7m\" (UniqueName: \"kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-kube-api-access-hgg7m\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.578714 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.578736 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.578846 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.578872 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.681703 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.682124 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.682257 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.682376 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.682487 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.682577 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgg7m\" (UniqueName: \"kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-kube-api-access-hgg7m\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.682700 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.682777 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.682941 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.683646 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.683825 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.683909 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.684005 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.684113 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.686867 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.687196 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.687575 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.688017 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.688342 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.688933 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.688988 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.689375 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.689570 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.689622 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.690181 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.690390 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.693378 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.697636 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgg7m\" (UniqueName: \"kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-kube-api-access-hgg7m\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-44xkb\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:06 crc kubenswrapper[4632]: I1201 07:10:06.833533 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:07 crc kubenswrapper[4632]: I1201 07:10:07.277236 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"]
Dec 01 07:10:07 crc kubenswrapper[4632]: I1201 07:10:07.434877 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb" event={"ID":"252ea9f1-a749-4524-9a53-dffbad624ea7","Type":"ContainerStarted","Data":"45ebf9723cdbf60e3b556690a06bafba9c1d7af88e3fcb9c26df1577aaa4c9aa"}
Dec 01 07:10:07 crc kubenswrapper[4632]: I1201 07:10:07.750652 4632 scope.go:117] "RemoveContainer" containerID="b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d"
Dec 01 07:10:07 crc kubenswrapper[4632]: E1201 07:10:07.750945 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539"
Dec 01 07:10:08 crc kubenswrapper[4632]: I1201 07:10:08.457391 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb" event={"ID":"252ea9f1-a749-4524-9a53-dffbad624ea7","Type":"ContainerStarted","Data":"d5beee5994e6a4424e522e34513952a33ef8f6d558a0e431667c9f36a4362a00"}
Dec 01 07:10:08 crc kubenswrapper[4632]: I1201 07:10:08.472417 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb" podStartSLOduration=1.8870339999999999 podStartE2EDuration="2.472400766s" podCreationTimestamp="2025-12-01 07:10:06 +0000 UTC" firstStartedPulling="2025-12-01 07:10:07.280182498 +0000 UTC m=+1616.845195462" lastFinishedPulling="2025-12-01 07:10:07.865549255 +0000 UTC m=+1617.430562228" observedRunningTime="2025-12-01 07:10:08.471018249 +0000 UTC m=+1618.036031222" watchObservedRunningTime="2025-12-01 07:10:08.472400766 +0000 UTC m=+1618.037413739"
Dec 01 07:10:16 crc kubenswrapper[4632]: I1201 07:10:16.949388 4632 scope.go:117] "RemoveContainer" containerID="e445f124ec56419305d784c1676a9f4c6dc844e88ab40d2431e04b07f2507011"
Dec 01 07:10:16 crc kubenswrapper[4632]: I1201 07:10:16.990210 4632 scope.go:117] "RemoveContainer" containerID="274a9456e3bd82048198f00dfa297466f4dca496059f6fdf2f21e8fc184a0edd"
Dec 01 07:10:21 crc kubenswrapper[4632]: I1201 07:10:21.750271 4632 scope.go:117] "RemoveContainer" containerID="b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d"
Dec 01 07:10:21 crc kubenswrapper[4632]: E1201 07:10:21.751181 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539"
Dec 01 07:10:24 crc kubenswrapper[4632]: I1201 07:10:24.034957 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-hgt5p"]
Dec 01 07:10:24 crc kubenswrapper[4632]: I1201 07:10:24.041247 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-hgt5p"]
Dec 01 07:10:24 crc kubenswrapper[4632]: I1201 07:10:24.770679 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06654851-ed77-41a9-9cea-0baaadb44227" path="/var/lib/kubelet/pods/06654851-ed77-41a9-9cea-0baaadb44227/volumes"
Dec 01 07:10:34 crc kubenswrapper[4632]: I1201 07:10:34.667033 4632 generic.go:334] "Generic (PLEG): container finished" podID="252ea9f1-a749-4524-9a53-dffbad624ea7" containerID="d5beee5994e6a4424e522e34513952a33ef8f6d558a0e431667c9f36a4362a00" exitCode=0
Dec 01 07:10:34 crc kubenswrapper[4632]: I1201 07:10:34.667116 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb" event={"ID":"252ea9f1-a749-4524-9a53-dffbad624ea7","Type":"ContainerDied","Data":"d5beee5994e6a4424e522e34513952a33ef8f6d558a0e431667c9f36a4362a00"}
Dec 01 07:10:35 crc kubenswrapper[4632]: I1201 07:10:35.993484 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.106885 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-openstack-edpm-ipam-ovn-default-certs-0\") pod \"252ea9f1-a749-4524-9a53-dffbad624ea7\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") "
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.106927 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"252ea9f1-a749-4524-9a53-dffbad624ea7\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") "
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.106978 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-neutron-metadata-combined-ca-bundle\") pod \"252ea9f1-a749-4524-9a53-dffbad624ea7\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") "
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.107011 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-nova-combined-ca-bundle\") pod \"252ea9f1-a749-4524-9a53-dffbad624ea7\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") "
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.107067 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-inventory\") pod \"252ea9f1-a749-4524-9a53-dffbad624ea7\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") "
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.107091 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hgg7m\" (UniqueName: \"kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-kube-api-access-hgg7m\") pod \"252ea9f1-a749-4524-9a53-dffbad624ea7\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") "
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.107119 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-ovn-combined-ca-bundle\") pod \"252ea9f1-a749-4524-9a53-dffbad624ea7\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") "
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.107188 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-bootstrap-combined-ca-bundle\") pod \"252ea9f1-a749-4524-9a53-dffbad624ea7\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") "
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.107212 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-libvirt-combined-ca-bundle\") pod \"252ea9f1-a749-4524-9a53-dffbad624ea7\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") "
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.107231 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-repo-setup-combined-ca-bundle\") pod \"252ea9f1-a749-4524-9a53-dffbad624ea7\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") "
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.107255 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"252ea9f1-a749-4524-9a53-dffbad624ea7\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") "
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.107288 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-ssh-key\") pod \"252ea9f1-a749-4524-9a53-dffbad624ea7\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") "
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.107318 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-telemetry-combined-ca-bundle\") pod \"252ea9f1-a749-4524-9a53-dffbad624ea7\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") "
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.107391 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"252ea9f1-a749-4524-9a53-dffbad624ea7\" (UID: \"252ea9f1-a749-4524-9a53-dffbad624ea7\") "
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.114017 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "252ea9f1-a749-4524-9a53-dffbad624ea7" (UID: "252ea9f1-a749-4524-9a53-dffbad624ea7"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.114364 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "252ea9f1-a749-4524-9a53-dffbad624ea7" (UID: "252ea9f1-a749-4524-9a53-dffbad624ea7"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.114995 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "252ea9f1-a749-4524-9a53-dffbad624ea7" (UID: "252ea9f1-a749-4524-9a53-dffbad624ea7"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.115101 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "252ea9f1-a749-4524-9a53-dffbad624ea7" (UID: "252ea9f1-a749-4524-9a53-dffbad624ea7"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.115137 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-kube-api-access-hgg7m" (OuterVolumeSpecName: "kube-api-access-hgg7m") pod "252ea9f1-a749-4524-9a53-dffbad624ea7" (UID: "252ea9f1-a749-4524-9a53-dffbad624ea7"). InnerVolumeSpecName "kube-api-access-hgg7m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.115173 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "252ea9f1-a749-4524-9a53-dffbad624ea7" (UID: "252ea9f1-a749-4524-9a53-dffbad624ea7"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.115681 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "252ea9f1-a749-4524-9a53-dffbad624ea7" (UID: "252ea9f1-a749-4524-9a53-dffbad624ea7"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.116062 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "252ea9f1-a749-4524-9a53-dffbad624ea7" (UID: "252ea9f1-a749-4524-9a53-dffbad624ea7"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.116212 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "252ea9f1-a749-4524-9a53-dffbad624ea7" (UID: "252ea9f1-a749-4524-9a53-dffbad624ea7"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.116613 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "252ea9f1-a749-4524-9a53-dffbad624ea7" (UID: "252ea9f1-a749-4524-9a53-dffbad624ea7"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.117749 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "252ea9f1-a749-4524-9a53-dffbad624ea7" (UID: "252ea9f1-a749-4524-9a53-dffbad624ea7"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.118141 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "252ea9f1-a749-4524-9a53-dffbad624ea7" (UID: "252ea9f1-a749-4524-9a53-dffbad624ea7"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.131645 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-inventory" (OuterVolumeSpecName: "inventory") pod "252ea9f1-a749-4524-9a53-dffbad624ea7" (UID: "252ea9f1-a749-4524-9a53-dffbad624ea7"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.133166 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "252ea9f1-a749-4524-9a53-dffbad624ea7" (UID: "252ea9f1-a749-4524-9a53-dffbad624ea7"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.209877 4632 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.209907 4632 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.209922 4632 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.209934 4632 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.209945 4632 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-inventory\") on node \"crc\" DevicePath \"\""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.209953 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hgg7m\" (UniqueName: \"kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-kube-api-access-hgg7m\") on node \"crc\" DevicePath \"\""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.209963 4632 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.209971 4632 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.209982 4632 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.209991 4632 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.210002 4632 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.210013 4632 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.210022 4632 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/252ea9f1-a749-4524-9a53-dffbad624ea7-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.210032 4632 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/252ea9f1-a749-4524-9a53-dffbad624ea7-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\""
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.683859 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb" event={"ID":"252ea9f1-a749-4524-9a53-dffbad624ea7","Type":"ContainerDied","Data":"45ebf9723cdbf60e3b556690a06bafba9c1d7af88e3fcb9c26df1577aaa4c9aa"}
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.684206 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="45ebf9723cdbf60e3b556690a06bafba9c1d7af88e3fcb9c26df1577aaa4c9aa"
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.683917 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-44xkb"
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.750846 4632 scope.go:117] "RemoveContainer" containerID="b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d"
Dec 01 07:10:36 crc kubenswrapper[4632]: E1201 07:10:36.751286 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539"
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.770055 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-r4jjj"]
Dec 01 07:10:36 crc kubenswrapper[4632]: E1201 07:10:36.770584 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="252ea9f1-a749-4524-9a53-dffbad624ea7" containerName="install-certs-edpm-deployment-openstack-edpm-ipam"
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.770606 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="252ea9f1-a749-4524-9a53-dffbad624ea7" containerName="install-certs-edpm-deployment-openstack-edpm-ipam"
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.770810 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="252ea9f1-a749-4524-9a53-dffbad624ea7" containerName="install-certs-edpm-deployment-openstack-edpm-ipam"
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.771606 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r4jjj"
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.773057 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.773195 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.774312 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config"
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.774477 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.774672 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l6tzc"
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.780831 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-r4jjj"]
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.821079 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmn54\" (UniqueName: \"kubernetes.io/projected/a1e089cc-f5f6-476a-af14-d25cd1150efd-kube-api-access-lmn54\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r4jjj\" (UID: \"a1e089cc-f5f6-476a-af14-d25cd1150efd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r4jjj"
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.821137 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a1e089cc-f5f6-476a-af14-d25cd1150efd-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r4jjj\" (UID: \"a1e089cc-f5f6-476a-af14-d25cd1150efd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r4jjj"
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.821194 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/a1e089cc-f5f6-476a-af14-d25cd1150efd-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r4jjj\" (UID: \"a1e089cc-f5f6-476a-af14-d25cd1150efd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r4jjj"
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.821218 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1e089cc-f5f6-476a-af14-d25cd1150efd-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r4jjj\" (UID: \"a1e089cc-f5f6-476a-af14-d25cd1150efd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r4jjj"
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.821348 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a1e089cc-f5f6-476a-af14-d25cd1150efd-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r4jjj\" (UID: \"a1e089cc-f5f6-476a-af14-d25cd1150efd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r4jjj"
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.923149 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmn54\" (UniqueName: \"kubernetes.io/projected/a1e089cc-f5f6-476a-af14-d25cd1150efd-kube-api-access-lmn54\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r4jjj\" (UID: \"a1e089cc-f5f6-476a-af14-d25cd1150efd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r4jjj"
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.923246 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a1e089cc-f5f6-476a-af14-d25cd1150efd-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r4jjj\" (UID: \"a1e089cc-f5f6-476a-af14-d25cd1150efd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r4jjj"
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.923305 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/a1e089cc-f5f6-476a-af14-d25cd1150efd-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r4jjj\" (UID: \"a1e089cc-f5f6-476a-af14-d25cd1150efd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r4jjj"
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.923333 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1e089cc-f5f6-476a-af14-d25cd1150efd-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r4jjj\" (UID: \"a1e089cc-f5f6-476a-af14-d25cd1150efd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r4jjj"
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.923490 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a1e089cc-f5f6-476a-af14-d25cd1150efd-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r4jjj\" (UID: \"a1e089cc-f5f6-476a-af14-d25cd1150efd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r4jjj"
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.925575 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/a1e089cc-f5f6-476a-af14-d25cd1150efd-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r4jjj\" (UID: \"a1e089cc-f5f6-476a-af14-d25cd1150efd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r4jjj"
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.926925 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a1e089cc-f5f6-476a-af14-d25cd1150efd-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r4jjj\" (UID: \"a1e089cc-f5f6-476a-af14-d25cd1150efd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r4jjj"
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.926974 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a1e089cc-f5f6-476a-af14-d25cd1150efd-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r4jjj\" (UID: \"a1e089cc-f5f6-476a-af14-d25cd1150efd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r4jjj"
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.927890 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1e089cc-f5f6-476a-af14-d25cd1150efd-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r4jjj\" (UID: \"a1e089cc-f5f6-476a-af14-d25cd1150efd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r4jjj"
Dec 01 07:10:36 crc kubenswrapper[4632]: I1201 07:10:36.937105 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmn54\" (UniqueName: \"kubernetes.io/projected/a1e089cc-f5f6-476a-af14-d25cd1150efd-kube-api-access-lmn54\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r4jjj\" (UID: \"a1e089cc-f5f6-476a-af14-d25cd1150efd\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r4jjj"
Dec 01 07:10:37 crc kubenswrapper[4632]: I1201 07:10:37.094247 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r4jjj"
Dec 01 07:10:37 crc kubenswrapper[4632]: I1201 07:10:37.537682 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-r4jjj"]
Dec 01 07:10:37 crc kubenswrapper[4632]: I1201 07:10:37.692553 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r4jjj" event={"ID":"a1e089cc-f5f6-476a-af14-d25cd1150efd","Type":"ContainerStarted","Data":"ab5dc8c632196314676e03627eb962a218beba22088b89dfe6b56cf9eee6357c"}
Dec 01 07:10:38 crc kubenswrapper[4632]: I1201 07:10:38.703345 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r4jjj" event={"ID":"a1e089cc-f5f6-476a-af14-d25cd1150efd","Type":"ContainerStarted","Data":"d2f96877b707d8ba8f6243096449d7f6de6effa28ba33e614219a2cb4ba77744"}
Dec 01 07:10:38 crc kubenswrapper[4632]: I1201 07:10:38.722221 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r4jjj" podStartSLOduration=2.143028267 podStartE2EDuration="2.72220738s" podCreationTimestamp="2025-12-01 07:10:36 +0000 UTC" firstStartedPulling="2025-12-01 07:10:37.536501693 +0000 UTC m=+1647.101514666" lastFinishedPulling="2025-12-01 07:10:38.115680806 +0000 UTC m=+1647.680693779" observedRunningTime="2025-12-01 07:10:38.71693101 +0000 UTC m=+1648.281943983" watchObservedRunningTime="2025-12-01 07:10:38.72220738 +0000 UTC m=+1648.287220352"
Dec 01 07:10:48 crc kubenswrapper[4632]: I1201 07:10:48.751039 4632 scope.go:117] "RemoveContainer" containerID="b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d"
Dec 01 07:10:48 crc kubenswrapper[4632]: E1201 07:10:48.751984 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539"
Dec 01 07:11:03 crc kubenswrapper[4632]: I1201 07:11:03.750438 4632 scope.go:117] "RemoveContainer" containerID="b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d"
Dec 01 07:11:03 crc kubenswrapper[4632]: E1201 07:11:03.751322 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539"
Dec 01 07:11:15 crc kubenswrapper[4632]: I1201 07:11:15.750438 4632 scope.go:117] "RemoveContainer" containerID="b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d"
Dec 01 07:11:15 crc kubenswrapper[4632]: E1201 07:11:15.751413 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539"
Dec 01 07:11:17 crc kubenswrapper[4632]: I1201 07:11:17.075078 4632 scope.go:117] "RemoveContainer" containerID="af011fdcc774b7f78997b6e480cdcf8b6eaaac4b4b53410b3a480141b4f81314"
Dec 01 07:11:23 crc kubenswrapper[4632]: I1201 07:11:23.065587 4632 generic.go:334] "Generic (PLEG): container finished" podID="a1e089cc-f5f6-476a-af14-d25cd1150efd" containerID="d2f96877b707d8ba8f6243096449d7f6de6effa28ba33e614219a2cb4ba77744" exitCode=0
Dec 01 07:11:23 crc kubenswrapper[4632]: I1201 07:11:23.065678 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r4jjj" event={"ID":"a1e089cc-f5f6-476a-af14-d25cd1150efd","Type":"ContainerDied","Data":"d2f96877b707d8ba8f6243096449d7f6de6effa28ba33e614219a2cb4ba77744"}
Dec 01 07:11:24 crc kubenswrapper[4632]: I1201 07:11:24.415855 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r4jjj"
Dec 01 07:11:24 crc kubenswrapper[4632]: I1201 07:11:24.593390 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/a1e089cc-f5f6-476a-af14-d25cd1150efd-ovncontroller-config-0\") pod \"a1e089cc-f5f6-476a-af14-d25cd1150efd\" (UID: \"a1e089cc-f5f6-476a-af14-d25cd1150efd\") "
Dec 01 07:11:24 crc kubenswrapper[4632]: I1201 07:11:24.593595 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a1e089cc-f5f6-476a-af14-d25cd1150efd-inventory\") pod \"a1e089cc-f5f6-476a-af14-d25cd1150efd\" (UID: \"a1e089cc-f5f6-476a-af14-d25cd1150efd\") "
Dec 01 07:11:24 crc kubenswrapper[4632]: I1201 07:11:24.593685 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a1e089cc-f5f6-476a-af14-d25cd1150efd-ssh-key\") pod \"a1e089cc-f5f6-476a-af14-d25cd1150efd\" (UID: \"a1e089cc-f5f6-476a-af14-d25cd1150efd\") "
Dec 01 07:11:24 crc kubenswrapper[4632]: I1201 07:11:24.593888 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1e089cc-f5f6-476a-af14-d25cd1150efd-ovn-combined-ca-bundle\") pod \"a1e089cc-f5f6-476a-af14-d25cd1150efd\" (UID: \"a1e089cc-f5f6-476a-af14-d25cd1150efd\") "
Dec 01 07:11:24 crc kubenswrapper[4632]: I1201 07:11:24.593982 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lmn54\" (UniqueName: \"kubernetes.io/projected/a1e089cc-f5f6-476a-af14-d25cd1150efd-kube-api-access-lmn54\") pod \"a1e089cc-f5f6-476a-af14-d25cd1150efd\" (UID: \"a1e089cc-f5f6-476a-af14-d25cd1150efd\") "
Dec 01 07:11:24 crc kubenswrapper[4632]: I1201 07:11:24.599883 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1e089cc-f5f6-476a-af14-d25cd1150efd-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "a1e089cc-f5f6-476a-af14-d25cd1150efd" (UID: "a1e089cc-f5f6-476a-af14-d25cd1150efd"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:11:24 crc kubenswrapper[4632]: I1201 07:11:24.600451 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1e089cc-f5f6-476a-af14-d25cd1150efd-kube-api-access-lmn54" (OuterVolumeSpecName: "kube-api-access-lmn54") pod "a1e089cc-f5f6-476a-af14-d25cd1150efd" (UID: "a1e089cc-f5f6-476a-af14-d25cd1150efd"). InnerVolumeSpecName "kube-api-access-lmn54". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 01 07:11:24 crc kubenswrapper[4632]: I1201 07:11:24.617437 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a1e089cc-f5f6-476a-af14-d25cd1150efd-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "a1e089cc-f5f6-476a-af14-d25cd1150efd" (UID: "a1e089cc-f5f6-476a-af14-d25cd1150efd"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 01 07:11:24 crc kubenswrapper[4632]: I1201 07:11:24.621899 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1e089cc-f5f6-476a-af14-d25cd1150efd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a1e089cc-f5f6-476a-af14-d25cd1150efd" (UID: "a1e089cc-f5f6-476a-af14-d25cd1150efd"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:11:24 crc kubenswrapper[4632]: I1201 07:11:24.622021 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1e089cc-f5f6-476a-af14-d25cd1150efd-inventory" (OuterVolumeSpecName: "inventory") pod "a1e089cc-f5f6-476a-af14-d25cd1150efd" (UID: "a1e089cc-f5f6-476a-af14-d25cd1150efd"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 01 07:11:24 crc kubenswrapper[4632]: I1201 07:11:24.696824 4632 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a1e089cc-f5f6-476a-af14-d25cd1150efd-inventory\") on node \"crc\" DevicePath \"\""
Dec 01 07:11:24 crc kubenswrapper[4632]: I1201 07:11:24.696858 4632 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a1e089cc-f5f6-476a-af14-d25cd1150efd-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 01 07:11:24 crc kubenswrapper[4632]: I1201 07:11:24.696871 4632 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1e089cc-f5f6-476a-af14-d25cd1150efd-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 01 07:11:24 crc kubenswrapper[4632]: I1201 07:11:24.696890 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lmn54\" (UniqueName: \"kubernetes.io/projected/a1e089cc-f5f6-476a-af14-d25cd1150efd-kube-api-access-lmn54\") on node \"crc\" DevicePath \"\""
Dec 01 07:11:24 crc kubenswrapper[4632]: I1201 07:11:24.696903 4632 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/a1e089cc-f5f6-476a-af14-d25cd1150efd-ovncontroller-config-0\") on node \"crc\" DevicePath \"\""
Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.091561 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r4jjj" event={"ID":"a1e089cc-f5f6-476a-af14-d25cd1150efd","Type":"ContainerDied","Data":"ab5dc8c632196314676e03627eb962a218beba22088b89dfe6b56cf9eee6357c"}
Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.091899 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ab5dc8c632196314676e03627eb962a218beba22088b89dfe6b56cf9eee6357c"
Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.091660 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r4jjj"
Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.153225 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd"]
Dec 01 07:11:25 crc kubenswrapper[4632]: E1201 07:11:25.153775 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1e089cc-f5f6-476a-af14-d25cd1150efd" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.153793 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1e089cc-f5f6-476a-af14-d25cd1150efd" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.153988 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1e089cc-f5f6-476a-af14-d25cd1150efd" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.154784 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd"
Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.156786 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l6tzc"
Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.158014 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config"
Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.158449 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.158516 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.158678 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config"
Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.158705 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.160884 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd"]
Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.309333 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd\" (UID: \"1ca8649a-8e09-4edd-9f1f-72277996e08d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd"
Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.309570 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd\" (UID: \"1ca8649a-8e09-4edd-9f1f-72277996e08d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd"
Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.309710 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd\" (UID: \"1ca8649a-8e09-4edd-9f1f-72277996e08d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd"
Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.310024 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6l4t\" (UniqueName: \"kubernetes.io/projected/1ca8649a-8e09-4edd-9f1f-72277996e08d-kube-api-access-f6l4t\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd\" (UID: \"1ca8649a-8e09-4edd-9f1f-72277996e08d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd"
Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.310127 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd\" (UID: \"1ca8649a-8e09-4edd-9f1f-72277996e08d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd"
Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.310188 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd\" (UID: \"1ca8649a-8e09-4edd-9f1f-72277996e08d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd"
Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.410995 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd\" (UID: \"1ca8649a-8e09-4edd-9f1f-72277996e08d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd"
Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.411094 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd\" (UID: \"1ca8649a-8e09-4edd-9f1f-72277996e08d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd"
Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.411131 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd\" (UID: \"1ca8649a-8e09-4edd-9f1f-72277996e08d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd"
Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.411183 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6l4t\" (UniqueName: \"kubernetes.io/projected/1ca8649a-8e09-4edd-9f1f-72277996e08d-kube-api-access-f6l4t\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd\" (UID: \"1ca8649a-8e09-4edd-9f1f-72277996e08d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd"
Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.411219 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd\" (UID: \"1ca8649a-8e09-4edd-9f1f-72277996e08d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd"
Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.411243 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd\" (UID: \"1ca8649a-8e09-4edd-9f1f-72277996e08d\") "
pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd" Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.416627 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd\" (UID: \"1ca8649a-8e09-4edd-9f1f-72277996e08d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd" Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.416670 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd\" (UID: \"1ca8649a-8e09-4edd-9f1f-72277996e08d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd" Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.416747 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd\" (UID: \"1ca8649a-8e09-4edd-9f1f-72277996e08d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd" Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.417349 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd\" (UID: \"1ca8649a-8e09-4edd-9f1f-72277996e08d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd" Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.417858 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd\" (UID: \"1ca8649a-8e09-4edd-9f1f-72277996e08d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd" Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.426328 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6l4t\" (UniqueName: \"kubernetes.io/projected/1ca8649a-8e09-4edd-9f1f-72277996e08d-kube-api-access-f6l4t\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd\" (UID: \"1ca8649a-8e09-4edd-9f1f-72277996e08d\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd" Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.470418 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd" Dec 01 07:11:25 crc kubenswrapper[4632]: I1201 07:11:25.912445 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd"] Dec 01 07:11:26 crc kubenswrapper[4632]: I1201 07:11:26.100459 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd" event={"ID":"1ca8649a-8e09-4edd-9f1f-72277996e08d","Type":"ContainerStarted","Data":"5567cc0693359c0fa9ba4858feada8da2b6e7534af24f0190e6dc0e9bd3e9801"} Dec 01 07:11:26 crc kubenswrapper[4632]: I1201 07:11:26.751430 4632 scope.go:117] "RemoveContainer" containerID="b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d" Dec 01 07:11:26 crc kubenswrapper[4632]: E1201 07:11:26.751629 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:11:27 crc kubenswrapper[4632]: I1201 07:11:27.110392 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd" event={"ID":"1ca8649a-8e09-4edd-9f1f-72277996e08d","Type":"ContainerStarted","Data":"6874eb6a996fe840b3bae97ff905a13a7140e2fada60d27231071be0e6aaf776"} Dec 01 07:11:27 crc kubenswrapper[4632]: I1201 07:11:27.129757 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd" podStartSLOduration=1.550338639 podStartE2EDuration="2.129731024s" podCreationTimestamp="2025-12-01 07:11:25 +0000 UTC" firstStartedPulling="2025-12-01 07:11:25.92057839 +0000 UTC m=+1695.485591363" lastFinishedPulling="2025-12-01 07:11:26.499970775 +0000 UTC m=+1696.064983748" observedRunningTime="2025-12-01 07:11:27.122187868 +0000 UTC m=+1696.687200841" watchObservedRunningTime="2025-12-01 07:11:27.129731024 +0000 UTC m=+1696.694743996" Dec 01 07:11:41 crc kubenswrapper[4632]: I1201 07:11:41.749733 4632 scope.go:117] "RemoveContainer" containerID="b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d" Dec 01 07:11:41 crc kubenswrapper[4632]: E1201 07:11:41.750541 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:11:53 crc kubenswrapper[4632]: I1201 07:11:53.751706 4632 scope.go:117] "RemoveContainer" containerID="b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d" Dec 01 07:11:53 crc kubenswrapper[4632]: E1201 07:11:53.752787 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:11:59 crc kubenswrapper[4632]: I1201 07:11:59.345814 4632 generic.go:334] "Generic (PLEG): container finished" podID="1ca8649a-8e09-4edd-9f1f-72277996e08d" containerID="6874eb6a996fe840b3bae97ff905a13a7140e2fada60d27231071be0e6aaf776" exitCode=0 Dec 01 07:11:59 crc kubenswrapper[4632]: I1201 07:11:59.345908 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd" event={"ID":"1ca8649a-8e09-4edd-9f1f-72277996e08d","Type":"ContainerDied","Data":"6874eb6a996fe840b3bae97ff905a13a7140e2fada60d27231071be0e6aaf776"} Dec 01 07:12:00 crc kubenswrapper[4632]: I1201 07:12:00.687480 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd" Dec 01 07:12:00 crc kubenswrapper[4632]: I1201 07:12:00.863974 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-neutron-metadata-combined-ca-bundle\") pod \"1ca8649a-8e09-4edd-9f1f-72277996e08d\" (UID: \"1ca8649a-8e09-4edd-9f1f-72277996e08d\") " Dec 01 07:12:00 crc kubenswrapper[4632]: I1201 07:12:00.864157 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-neutron-ovn-metadata-agent-neutron-config-0\") pod \"1ca8649a-8e09-4edd-9f1f-72277996e08d\" (UID: \"1ca8649a-8e09-4edd-9f1f-72277996e08d\") " Dec 01 07:12:00 crc kubenswrapper[4632]: I1201 07:12:00.864187 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-nova-metadata-neutron-config-0\") pod \"1ca8649a-8e09-4edd-9f1f-72277996e08d\" (UID: \"1ca8649a-8e09-4edd-9f1f-72277996e08d\") " Dec 01 07:12:00 crc kubenswrapper[4632]: I1201 07:12:00.864260 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-inventory\") pod \"1ca8649a-8e09-4edd-9f1f-72277996e08d\" (UID: \"1ca8649a-8e09-4edd-9f1f-72277996e08d\") " Dec 01 07:12:00 crc kubenswrapper[4632]: I1201 07:12:00.864337 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-ssh-key\") pod \"1ca8649a-8e09-4edd-9f1f-72277996e08d\" (UID: \"1ca8649a-8e09-4edd-9f1f-72277996e08d\") " Dec 01 07:12:00 crc kubenswrapper[4632]: I1201 07:12:00.864464 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6l4t\" (UniqueName: \"kubernetes.io/projected/1ca8649a-8e09-4edd-9f1f-72277996e08d-kube-api-access-f6l4t\") pod \"1ca8649a-8e09-4edd-9f1f-72277996e08d\" (UID: \"1ca8649a-8e09-4edd-9f1f-72277996e08d\") " Dec 01 07:12:00 crc kubenswrapper[4632]: I1201 07:12:00.870183 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ca8649a-8e09-4edd-9f1f-72277996e08d-kube-api-access-f6l4t" (OuterVolumeSpecName: "kube-api-access-f6l4t") pod "1ca8649a-8e09-4edd-9f1f-72277996e08d" (UID: "1ca8649a-8e09-4edd-9f1f-72277996e08d"). 
InnerVolumeSpecName "kube-api-access-f6l4t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:12:00 crc kubenswrapper[4632]: I1201 07:12:00.871986 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "1ca8649a-8e09-4edd-9f1f-72277996e08d" (UID: "1ca8649a-8e09-4edd-9f1f-72277996e08d"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:00 crc kubenswrapper[4632]: I1201 07:12:00.889767 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "1ca8649a-8e09-4edd-9f1f-72277996e08d" (UID: "1ca8649a-8e09-4edd-9f1f-72277996e08d"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:00 crc kubenswrapper[4632]: I1201 07:12:00.890175 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-inventory" (OuterVolumeSpecName: "inventory") pod "1ca8649a-8e09-4edd-9f1f-72277996e08d" (UID: "1ca8649a-8e09-4edd-9f1f-72277996e08d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:00 crc kubenswrapper[4632]: I1201 07:12:00.890522 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "1ca8649a-8e09-4edd-9f1f-72277996e08d" (UID: "1ca8649a-8e09-4edd-9f1f-72277996e08d"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:00 crc kubenswrapper[4632]: I1201 07:12:00.891797 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1ca8649a-8e09-4edd-9f1f-72277996e08d" (UID: "1ca8649a-8e09-4edd-9f1f-72277996e08d"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:12:00 crc kubenswrapper[4632]: I1201 07:12:00.967945 4632 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:00 crc kubenswrapper[4632]: I1201 07:12:00.968160 4632 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:00 crc kubenswrapper[4632]: I1201 07:12:00.968173 4632 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:00 crc kubenswrapper[4632]: I1201 07:12:00.968183 4632 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:00 crc kubenswrapper[4632]: I1201 07:12:00.968194 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6l4t\" (UniqueName: \"kubernetes.io/projected/1ca8649a-8e09-4edd-9f1f-72277996e08d-kube-api-access-f6l4t\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:00 crc kubenswrapper[4632]: I1201 07:12:00.968204 4632 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ca8649a-8e09-4edd-9f1f-72277996e08d-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.364843 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd" event={"ID":"1ca8649a-8e09-4edd-9f1f-72277996e08d","Type":"ContainerDied","Data":"5567cc0693359c0fa9ba4858feada8da2b6e7534af24f0190e6dc0e9bd3e9801"} Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.364890 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd" Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.364913 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5567cc0693359c0fa9ba4858feada8da2b6e7534af24f0190e6dc0e9bd3e9801" Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.527286 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq"] Dec 01 07:12:01 crc kubenswrapper[4632]: E1201 07:12:01.528181 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ca8649a-8e09-4edd-9f1f-72277996e08d" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.528216 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ca8649a-8e09-4edd-9f1f-72277996e08d" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.528601 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ca8649a-8e09-4edd-9f1f-72277996e08d" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.529761 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq" Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.533628 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.534973 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.535196 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.535453 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.536993 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq"] Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.537385 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l6tzc" Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.684674 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq\" (UID: \"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq" Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.684775 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq\" (UID: \"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq" Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.684814 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq\" (UID: \"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq" Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.684836 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pw2zz\" (UniqueName: \"kubernetes.io/projected/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-kube-api-access-pw2zz\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq\" (UID: \"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq" Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.684859 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq\" (UID: \"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq" Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.787056 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq\" (UID: \"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq" Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.787186 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq\" (UID: \"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq" Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.787232 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq\" (UID: \"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq" Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.787261 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pw2zz\" (UniqueName: \"kubernetes.io/projected/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-kube-api-access-pw2zz\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq\" (UID: \"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq" Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.787287 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq\" (UID: \"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq" Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.792451 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq\" (UID: \"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq" Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.793143 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq\" (UID: \"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq" Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.793246 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq\" (UID: \"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq" Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.793295 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-ssh-key\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq\" (UID: \"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq" Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.802555 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pw2zz\" (UniqueName: \"kubernetes.io/projected/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-kube-api-access-pw2zz\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq\" (UID: \"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq" Dec 01 07:12:01 crc kubenswrapper[4632]: I1201 07:12:01.852666 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq" Dec 01 07:12:02 crc kubenswrapper[4632]: I1201 07:12:02.316447 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq"] Dec 01 07:12:02 crc kubenswrapper[4632]: I1201 07:12:02.373664 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq" event={"ID":"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5","Type":"ContainerStarted","Data":"ab2e5bb15c3efb7fb5b40a85be7a9e931e37c1aea85391c85e74815ca58dd5af"} Dec 01 07:12:03 crc kubenswrapper[4632]: I1201 07:12:03.383279 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq" event={"ID":"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5","Type":"ContainerStarted","Data":"d36746f2fce5923fc7228e3a28edf536676dec2f0ea3035ca648307394efacab"} Dec 01 07:12:03 crc kubenswrapper[4632]: I1201 07:12:03.403795 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq" podStartSLOduration=1.818472999 podStartE2EDuration="2.403770019s" podCreationTimestamp="2025-12-01 07:12:01 +0000 UTC" firstStartedPulling="2025-12-01 07:12:02.319650072 +0000 UTC m=+1731.884663045" lastFinishedPulling="2025-12-01 07:12:02.904947091 +0000 UTC m=+1732.469960065" observedRunningTime="2025-12-01 07:12:03.399155367 +0000 UTC m=+1732.964168340" watchObservedRunningTime="2025-12-01 07:12:03.403770019 +0000 UTC m=+1732.968782981" Dec 01 07:12:06 crc kubenswrapper[4632]: I1201 07:12:06.751127 4632 scope.go:117] "RemoveContainer" containerID="b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d" Dec 01 07:12:06 crc kubenswrapper[4632]: E1201 07:12:06.752058 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:12:18 crc kubenswrapper[4632]: I1201 07:12:18.750567 4632 scope.go:117] "RemoveContainer" containerID="b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d" Dec 01 07:12:18 crc kubenswrapper[4632]: E1201 07:12:18.751957 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:12:30 crc kubenswrapper[4632]: I1201 07:12:30.754621 4632 scope.go:117] "RemoveContainer" containerID="b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d" Dec 01 07:12:30 crc kubenswrapper[4632]: E1201 07:12:30.755331 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:12:41 crc kubenswrapper[4632]: I1201 07:12:41.750446 4632 scope.go:117] "RemoveContainer" containerID="b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d" Dec 01 07:12:41 crc kubenswrapper[4632]: E1201 07:12:41.751183 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:12:53 crc kubenswrapper[4632]: I1201 07:12:53.750828 4632 scope.go:117] "RemoveContainer" containerID="b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d" Dec 01 07:12:53 crc kubenswrapper[4632]: E1201 07:12:53.751564 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:13:08 crc kubenswrapper[4632]: I1201 07:13:08.751181 4632 scope.go:117] "RemoveContainer" containerID="b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d" Dec 01 07:13:08 crc kubenswrapper[4632]: E1201 07:13:08.752652 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:13:21 crc kubenswrapper[4632]: I1201 07:13:21.749817 4632 scope.go:117] "RemoveContainer" containerID="b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d" Dec 01 07:13:22 crc kubenswrapper[4632]: I1201 07:13:22.031240 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerStarted","Data":"0037ec22086b1f59aa504947ce2ddb7890c5d4ea1ee28fcc11fcf7a92578eb2f"} Dec 01 07:15:00 crc kubenswrapper[4632]: I1201 07:15:00.130018 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409555-nk24g"] Dec 01 07:15:00 crc kubenswrapper[4632]: 
I1201 07:15:00.131487 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-nk24g" Dec 01 07:15:00 crc kubenswrapper[4632]: I1201 07:15:00.132884 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 07:15:00 crc kubenswrapper[4632]: I1201 07:15:00.132900 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 07:15:00 crc kubenswrapper[4632]: I1201 07:15:00.136281 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409555-nk24g"] Dec 01 07:15:00 crc kubenswrapper[4632]: I1201 07:15:00.240176 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44fq2\" (UniqueName: \"kubernetes.io/projected/8e85ae99-78e9-4d00-a770-145d48a2e251-kube-api-access-44fq2\") pod \"collect-profiles-29409555-nk24g\" (UID: \"8e85ae99-78e9-4d00-a770-145d48a2e251\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-nk24g" Dec 01 07:15:00 crc kubenswrapper[4632]: I1201 07:15:00.240260 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8e85ae99-78e9-4d00-a770-145d48a2e251-config-volume\") pod \"collect-profiles-29409555-nk24g\" (UID: \"8e85ae99-78e9-4d00-a770-145d48a2e251\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-nk24g" Dec 01 07:15:00 crc kubenswrapper[4632]: I1201 07:15:00.240284 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8e85ae99-78e9-4d00-a770-145d48a2e251-secret-volume\") pod \"collect-profiles-29409555-nk24g\" (UID: \"8e85ae99-78e9-4d00-a770-145d48a2e251\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-nk24g" Dec 01 07:15:00 crc kubenswrapper[4632]: I1201 07:15:00.342413 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8e85ae99-78e9-4d00-a770-145d48a2e251-config-volume\") pod \"collect-profiles-29409555-nk24g\" (UID: \"8e85ae99-78e9-4d00-a770-145d48a2e251\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-nk24g" Dec 01 07:15:00 crc kubenswrapper[4632]: I1201 07:15:00.342453 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8e85ae99-78e9-4d00-a770-145d48a2e251-secret-volume\") pod \"collect-profiles-29409555-nk24g\" (UID: \"8e85ae99-78e9-4d00-a770-145d48a2e251\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-nk24g" Dec 01 07:15:00 crc kubenswrapper[4632]: I1201 07:15:00.342543 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44fq2\" (UniqueName: \"kubernetes.io/projected/8e85ae99-78e9-4d00-a770-145d48a2e251-kube-api-access-44fq2\") pod \"collect-profiles-29409555-nk24g\" (UID: \"8e85ae99-78e9-4d00-a770-145d48a2e251\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-nk24g" Dec 01 07:15:00 crc kubenswrapper[4632]: I1201 07:15:00.343141 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/8e85ae99-78e9-4d00-a770-145d48a2e251-config-volume\") pod \"collect-profiles-29409555-nk24g\" (UID: \"8e85ae99-78e9-4d00-a770-145d48a2e251\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-nk24g" Dec 01 07:15:00 crc kubenswrapper[4632]: I1201 07:15:00.346986 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8e85ae99-78e9-4d00-a770-145d48a2e251-secret-volume\") pod \"collect-profiles-29409555-nk24g\" (UID: \"8e85ae99-78e9-4d00-a770-145d48a2e251\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-nk24g" Dec 01 07:15:00 crc kubenswrapper[4632]: I1201 07:15:00.355931 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44fq2\" (UniqueName: \"kubernetes.io/projected/8e85ae99-78e9-4d00-a770-145d48a2e251-kube-api-access-44fq2\") pod \"collect-profiles-29409555-nk24g\" (UID: \"8e85ae99-78e9-4d00-a770-145d48a2e251\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-nk24g" Dec 01 07:15:00 crc kubenswrapper[4632]: I1201 07:15:00.450712 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-nk24g" Dec 01 07:15:00 crc kubenswrapper[4632]: I1201 07:15:00.819758 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409555-nk24g"] Dec 01 07:15:01 crc kubenswrapper[4632]: I1201 07:15:01.740869 4632 generic.go:334] "Generic (PLEG): container finished" podID="8e85ae99-78e9-4d00-a770-145d48a2e251" containerID="317196adc8ff358baf824c3e832034711b9f40e8034bf536640415e0f32c1373" exitCode=0 Dec 01 07:15:01 crc kubenswrapper[4632]: I1201 07:15:01.740913 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-nk24g" event={"ID":"8e85ae99-78e9-4d00-a770-145d48a2e251","Type":"ContainerDied","Data":"317196adc8ff358baf824c3e832034711b9f40e8034bf536640415e0f32c1373"} Dec 01 07:15:01 crc kubenswrapper[4632]: I1201 07:15:01.741122 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-nk24g" event={"ID":"8e85ae99-78e9-4d00-a770-145d48a2e251","Type":"ContainerStarted","Data":"2d89b15c8cc36d2a7d975b0a968e26589bc58cb5e3b0039d003ecbcfec8b0d45"} Dec 01 07:15:02 crc kubenswrapper[4632]: I1201 07:15:02.987957 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-nk24g" Dec 01 07:15:03 crc kubenswrapper[4632]: I1201 07:15:03.084555 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8e85ae99-78e9-4d00-a770-145d48a2e251-config-volume\") pod \"8e85ae99-78e9-4d00-a770-145d48a2e251\" (UID: \"8e85ae99-78e9-4d00-a770-145d48a2e251\") " Dec 01 07:15:03 crc kubenswrapper[4632]: I1201 07:15:03.085690 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e85ae99-78e9-4d00-a770-145d48a2e251-config-volume" (OuterVolumeSpecName: "config-volume") pod "8e85ae99-78e9-4d00-a770-145d48a2e251" (UID: "8e85ae99-78e9-4d00-a770-145d48a2e251"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:15:03 crc kubenswrapper[4632]: I1201 07:15:03.085737 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8e85ae99-78e9-4d00-a770-145d48a2e251-secret-volume\") pod \"8e85ae99-78e9-4d00-a770-145d48a2e251\" (UID: \"8e85ae99-78e9-4d00-a770-145d48a2e251\") " Dec 01 07:15:03 crc kubenswrapper[4632]: I1201 07:15:03.085784 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44fq2\" (UniqueName: \"kubernetes.io/projected/8e85ae99-78e9-4d00-a770-145d48a2e251-kube-api-access-44fq2\") pod \"8e85ae99-78e9-4d00-a770-145d48a2e251\" (UID: \"8e85ae99-78e9-4d00-a770-145d48a2e251\") " Dec 01 07:15:03 crc kubenswrapper[4632]: I1201 07:15:03.086695 4632 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8e85ae99-78e9-4d00-a770-145d48a2e251-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:03 crc kubenswrapper[4632]: I1201 07:15:03.091119 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e85ae99-78e9-4d00-a770-145d48a2e251-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8e85ae99-78e9-4d00-a770-145d48a2e251" (UID: "8e85ae99-78e9-4d00-a770-145d48a2e251"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:03 crc kubenswrapper[4632]: I1201 07:15:03.091474 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e85ae99-78e9-4d00-a770-145d48a2e251-kube-api-access-44fq2" (OuterVolumeSpecName: "kube-api-access-44fq2") pod "8e85ae99-78e9-4d00-a770-145d48a2e251" (UID: "8e85ae99-78e9-4d00-a770-145d48a2e251"). InnerVolumeSpecName "kube-api-access-44fq2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:03 crc kubenswrapper[4632]: I1201 07:15:03.188343 4632 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8e85ae99-78e9-4d00-a770-145d48a2e251-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:03 crc kubenswrapper[4632]: I1201 07:15:03.188387 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44fq2\" (UniqueName: \"kubernetes.io/projected/8e85ae99-78e9-4d00-a770-145d48a2e251-kube-api-access-44fq2\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:03 crc kubenswrapper[4632]: I1201 07:15:03.754481 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-nk24g" event={"ID":"8e85ae99-78e9-4d00-a770-145d48a2e251","Type":"ContainerDied","Data":"2d89b15c8cc36d2a7d975b0a968e26589bc58cb5e3b0039d003ecbcfec8b0d45"} Dec 01 07:15:03 crc kubenswrapper[4632]: I1201 07:15:03.754514 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d89b15c8cc36d2a7d975b0a968e26589bc58cb5e3b0039d003ecbcfec8b0d45" Dec 01 07:15:03 crc kubenswrapper[4632]: I1201 07:15:03.754528 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409555-nk24g" Dec 01 07:15:07 crc kubenswrapper[4632]: I1201 07:15:07.789417 4632 generic.go:334] "Generic (PLEG): container finished" podID="a2462a74-2ab0-47cc-9bed-77ce67b0a6c5" containerID="d36746f2fce5923fc7228e3a28edf536676dec2f0ea3035ca648307394efacab" exitCode=0 Dec 01 07:15:07 crc kubenswrapper[4632]: I1201 07:15:07.789445 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq" event={"ID":"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5","Type":"ContainerDied","Data":"d36746f2fce5923fc7228e3a28edf536676dec2f0ea3035ca648307394efacab"} Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.061282 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.166916 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-ssh-key\") pod \"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5\" (UID: \"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5\") " Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.166968 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-inventory\") pod \"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5\" (UID: \"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5\") " Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.167018 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-libvirt-secret-0\") pod \"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5\" (UID: \"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5\") " Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.167074 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-libvirt-combined-ca-bundle\") pod \"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5\" (UID: \"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5\") " Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.167090 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pw2zz\" (UniqueName: \"kubernetes.io/projected/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-kube-api-access-pw2zz\") pod \"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5\" (UID: \"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5\") " Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.171006 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "a2462a74-2ab0-47cc-9bed-77ce67b0a6c5" (UID: "a2462a74-2ab0-47cc-9bed-77ce67b0a6c5"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.171099 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-kube-api-access-pw2zz" (OuterVolumeSpecName: "kube-api-access-pw2zz") pod "a2462a74-2ab0-47cc-9bed-77ce67b0a6c5" (UID: "a2462a74-2ab0-47cc-9bed-77ce67b0a6c5"). InnerVolumeSpecName "kube-api-access-pw2zz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.186890 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-inventory" (OuterVolumeSpecName: "inventory") pod "a2462a74-2ab0-47cc-9bed-77ce67b0a6c5" (UID: "a2462a74-2ab0-47cc-9bed-77ce67b0a6c5"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.187540 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "a2462a74-2ab0-47cc-9bed-77ce67b0a6c5" (UID: "a2462a74-2ab0-47cc-9bed-77ce67b0a6c5"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.188658 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a2462a74-2ab0-47cc-9bed-77ce67b0a6c5" (UID: "a2462a74-2ab0-47cc-9bed-77ce67b0a6c5"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.269624 4632 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.269653 4632 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.269663 4632 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.269675 4632 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.269684 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pw2zz\" (UniqueName: \"kubernetes.io/projected/a2462a74-2ab0-47cc-9bed-77ce67b0a6c5-kube-api-access-pw2zz\") on node \"crc\" DevicePath \"\"" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.803152 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq" event={"ID":"a2462a74-2ab0-47cc-9bed-77ce67b0a6c5","Type":"ContainerDied","Data":"ab2e5bb15c3efb7fb5b40a85be7a9e931e37c1aea85391c85e74815ca58dd5af"} Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.803176 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.803187 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ab2e5bb15c3efb7fb5b40a85be7a9e931e37c1aea85391c85e74815ca58dd5af" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.860094 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd"] Dec 01 07:15:09 crc kubenswrapper[4632]: E1201 07:15:09.860870 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e85ae99-78e9-4d00-a770-145d48a2e251" containerName="collect-profiles" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.860939 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e85ae99-78e9-4d00-a770-145d48a2e251" containerName="collect-profiles" Dec 01 07:15:09 crc kubenswrapper[4632]: E1201 07:15:09.861015 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2462a74-2ab0-47cc-9bed-77ce67b0a6c5" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.861073 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2462a74-2ab0-47cc-9bed-77ce67b0a6c5" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.861510 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e85ae99-78e9-4d00-a770-145d48a2e251" containerName="collect-profiles" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.861600 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2462a74-2ab0-47cc-9bed-77ce67b0a6c5" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.862480 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.868069 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.868203 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.868395 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l6tzc" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.869482 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.870119 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.870178 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.870211 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.884022 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd"] Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.978195 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t9nbd\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.978252 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t9nbd\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.978306 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t9nbd\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.978326 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t9nbd\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.978385 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: 
\"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t9nbd\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.978418 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t9nbd\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.978452 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t9nbd\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.978481 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ltmk\" (UniqueName: \"kubernetes.io/projected/a4cb69cd-b4b1-4f58-9553-27564432b39c-kube-api-access-2ltmk\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t9nbd\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:09 crc kubenswrapper[4632]: I1201 07:15:09.978503 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t9nbd\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:10 crc kubenswrapper[4632]: I1201 07:15:10.079126 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t9nbd\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:10 crc kubenswrapper[4632]: I1201 07:15:10.079373 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ltmk\" (UniqueName: \"kubernetes.io/projected/a4cb69cd-b4b1-4f58-9553-27564432b39c-kube-api-access-2ltmk\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t9nbd\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:10 crc kubenswrapper[4632]: I1201 07:15:10.079404 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t9nbd\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:10 crc kubenswrapper[4632]: I1201 07:15:10.079437 4632 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t9nbd\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:10 crc kubenswrapper[4632]: I1201 07:15:10.079472 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t9nbd\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:10 crc kubenswrapper[4632]: I1201 07:15:10.079527 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t9nbd\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:10 crc kubenswrapper[4632]: I1201 07:15:10.079544 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t9nbd\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:10 crc kubenswrapper[4632]: I1201 07:15:10.079573 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t9nbd\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:10 crc kubenswrapper[4632]: I1201 07:15:10.079604 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t9nbd\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:10 crc kubenswrapper[4632]: I1201 07:15:10.081333 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t9nbd\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:10 crc kubenswrapper[4632]: I1201 07:15:10.084217 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t9nbd\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:10 crc kubenswrapper[4632]: I1201 07:15:10.084252 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-migration-ssh-key-0\") 
pod \"nova-edpm-deployment-openstack-edpm-ipam-t9nbd\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:10 crc kubenswrapper[4632]: I1201 07:15:10.084321 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t9nbd\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:10 crc kubenswrapper[4632]: I1201 07:15:10.085324 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t9nbd\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:10 crc kubenswrapper[4632]: I1201 07:15:10.085646 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t9nbd\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:10 crc kubenswrapper[4632]: I1201 07:15:10.085705 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t9nbd\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:10 crc kubenswrapper[4632]: I1201 07:15:10.085912 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t9nbd\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:10 crc kubenswrapper[4632]: I1201 07:15:10.091664 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ltmk\" (UniqueName: \"kubernetes.io/projected/a4cb69cd-b4b1-4f58-9553-27564432b39c-kube-api-access-2ltmk\") pod \"nova-edpm-deployment-openstack-edpm-ipam-t9nbd\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:10 crc kubenswrapper[4632]: I1201 07:15:10.181475 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:15:10 crc kubenswrapper[4632]: I1201 07:15:10.593330 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd"] Dec 01 07:15:10 crc kubenswrapper[4632]: I1201 07:15:10.599675 4632 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 07:15:10 crc kubenswrapper[4632]: I1201 07:15:10.812076 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" event={"ID":"a4cb69cd-b4b1-4f58-9553-27564432b39c","Type":"ContainerStarted","Data":"22d34bf1f3719c154bfd0a583b3f2223967806d88f23eefe8a625c123b11acd5"} Dec 01 07:15:11 crc kubenswrapper[4632]: I1201 07:15:11.691096 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 07:15:12 crc kubenswrapper[4632]: I1201 07:15:12.827313 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" event={"ID":"a4cb69cd-b4b1-4f58-9553-27564432b39c","Type":"ContainerStarted","Data":"ccecb515c9c8c7d4e77458fe557e7dc1976dbc686a3dfab6300c98be2baefa1c"} Dec 01 07:15:12 crc kubenswrapper[4632]: I1201 07:15:12.843754 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" podStartSLOduration=2.754418325 podStartE2EDuration="3.843728131s" podCreationTimestamp="2025-12-01 07:15:09 +0000 UTC" firstStartedPulling="2025-12-01 07:15:10.599483888 +0000 UTC m=+1920.164496862" lastFinishedPulling="2025-12-01 07:15:11.688793694 +0000 UTC m=+1921.253806668" observedRunningTime="2025-12-01 07:15:12.841079768 +0000 UTC m=+1922.406092740" watchObservedRunningTime="2025-12-01 07:15:12.843728131 +0000 UTC m=+1922.408741104" Dec 01 07:15:49 crc kubenswrapper[4632]: I1201 07:15:49.498111 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:15:49 crc kubenswrapper[4632]: I1201 07:15:49.498589 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:16:19 crc kubenswrapper[4632]: I1201 07:16:19.498292 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:16:19 crc kubenswrapper[4632]: I1201 07:16:19.498990 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:16:49 crc kubenswrapper[4632]: I1201 07:16:49.498095 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs 
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:16:49 crc kubenswrapper[4632]: I1201 07:16:49.498721 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:16:49 crc kubenswrapper[4632]: I1201 07:16:49.498780 4632 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" Dec 01 07:16:49 crc kubenswrapper[4632]: I1201 07:16:49.499709 4632 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0037ec22086b1f59aa504947ce2ddb7890c5d4ea1ee28fcc11fcf7a92578eb2f"} pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 07:16:49 crc kubenswrapper[4632]: I1201 07:16:49.499766 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" containerID="cri-o://0037ec22086b1f59aa504947ce2ddb7890c5d4ea1ee28fcc11fcf7a92578eb2f" gracePeriod=600 Dec 01 07:16:50 crc kubenswrapper[4632]: I1201 07:16:50.496819 4632 generic.go:334] "Generic (PLEG): container finished" podID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerID="0037ec22086b1f59aa504947ce2ddb7890c5d4ea1ee28fcc11fcf7a92578eb2f" exitCode=0 Dec 01 07:16:50 crc kubenswrapper[4632]: I1201 07:16:50.496907 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerDied","Data":"0037ec22086b1f59aa504947ce2ddb7890c5d4ea1ee28fcc11fcf7a92578eb2f"} Dec 01 07:16:50 crc kubenswrapper[4632]: I1201 07:16:50.497478 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerStarted","Data":"d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b"} Dec 01 07:16:50 crc kubenswrapper[4632]: I1201 07:16:50.497504 4632 scope.go:117] "RemoveContainer" containerID="b61f915d3f1f40a8a720425b00b113eb63fc9d6ec603ebb4719d0b89e965c54d" Dec 01 07:16:59 crc kubenswrapper[4632]: I1201 07:16:59.563055 4632 generic.go:334] "Generic (PLEG): container finished" podID="a4cb69cd-b4b1-4f58-9553-27564432b39c" containerID="ccecb515c9c8c7d4e77458fe557e7dc1976dbc686a3dfab6300c98be2baefa1c" exitCode=0 Dec 01 07:16:59 crc kubenswrapper[4632]: I1201 07:16:59.563104 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" event={"ID":"a4cb69cd-b4b1-4f58-9553-27564432b39c","Type":"ContainerDied","Data":"ccecb515c9c8c7d4e77458fe557e7dc1976dbc686a3dfab6300c98be2baefa1c"} Dec 01 07:17:00 crc kubenswrapper[4632]: I1201 07:17:00.878392 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.046477 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-combined-ca-bundle\") pod \"a4cb69cd-b4b1-4f58-9553-27564432b39c\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.046633 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-inventory\") pod \"a4cb69cd-b4b1-4f58-9553-27564432b39c\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.046654 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-ssh-key\") pod \"a4cb69cd-b4b1-4f58-9553-27564432b39c\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.046717 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ltmk\" (UniqueName: \"kubernetes.io/projected/a4cb69cd-b4b1-4f58-9553-27564432b39c-kube-api-access-2ltmk\") pod \"a4cb69cd-b4b1-4f58-9553-27564432b39c\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.047544 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-cell1-compute-config-1\") pod \"a4cb69cd-b4b1-4f58-9553-27564432b39c\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.047629 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-extra-config-0\") pod \"a4cb69cd-b4b1-4f58-9553-27564432b39c\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.047714 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-migration-ssh-key-0\") pod \"a4cb69cd-b4b1-4f58-9553-27564432b39c\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.047792 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-migration-ssh-key-1\") pod \"a4cb69cd-b4b1-4f58-9553-27564432b39c\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.047821 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-cell1-compute-config-0\") pod \"a4cb69cd-b4b1-4f58-9553-27564432b39c\" (UID: \"a4cb69cd-b4b1-4f58-9553-27564432b39c\") " Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.053592 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/a4cb69cd-b4b1-4f58-9553-27564432b39c-kube-api-access-2ltmk" (OuterVolumeSpecName: "kube-api-access-2ltmk") pod "a4cb69cd-b4b1-4f58-9553-27564432b39c" (UID: "a4cb69cd-b4b1-4f58-9553-27564432b39c"). InnerVolumeSpecName "kube-api-access-2ltmk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.053960 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "a4cb69cd-b4b1-4f58-9553-27564432b39c" (UID: "a4cb69cd-b4b1-4f58-9553-27564432b39c"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.070807 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "a4cb69cd-b4b1-4f58-9553-27564432b39c" (UID: "a4cb69cd-b4b1-4f58-9553-27564432b39c"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.072276 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a4cb69cd-b4b1-4f58-9553-27564432b39c" (UID: "a4cb69cd-b4b1-4f58-9553-27564432b39c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.073285 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "a4cb69cd-b4b1-4f58-9553-27564432b39c" (UID: "a4cb69cd-b4b1-4f58-9553-27564432b39c"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.074386 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "a4cb69cd-b4b1-4f58-9553-27564432b39c" (UID: "a4cb69cd-b4b1-4f58-9553-27564432b39c"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.074680 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "a4cb69cd-b4b1-4f58-9553-27564432b39c" (UID: "a4cb69cd-b4b1-4f58-9553-27564432b39c"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.080056 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-inventory" (OuterVolumeSpecName: "inventory") pod "a4cb69cd-b4b1-4f58-9553-27564432b39c" (UID: "a4cb69cd-b4b1-4f58-9553-27564432b39c"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.081276 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "a4cb69cd-b4b1-4f58-9553-27564432b39c" (UID: "a4cb69cd-b4b1-4f58-9553-27564432b39c"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.150307 4632 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.150336 4632 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.150346 4632 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.150384 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ltmk\" (UniqueName: \"kubernetes.io/projected/a4cb69cd-b4b1-4f58-9553-27564432b39c-kube-api-access-2ltmk\") on node \"crc\" DevicePath \"\"" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.150396 4632 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.150405 4632 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.150414 4632 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.150422 4632 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.150431 4632 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/a4cb69cd-b4b1-4f58-9553-27564432b39c-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.579213 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" event={"ID":"a4cb69cd-b4b1-4f58-9553-27564432b39c","Type":"ContainerDied","Data":"22d34bf1f3719c154bfd0a583b3f2223967806d88f23eefe8a625c123b11acd5"} Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.579266 4632 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="22d34bf1f3719c154bfd0a583b3f2223967806d88f23eefe8a625c123b11acd5" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.579277 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-t9nbd" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.665103 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs"] Dec 01 07:17:01 crc kubenswrapper[4632]: E1201 07:17:01.665543 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4cb69cd-b4b1-4f58-9553-27564432b39c" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.665562 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4cb69cd-b4b1-4f58-9553-27564432b39c" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.665734 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4cb69cd-b4b1-4f58-9553-27564432b39c" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.666400 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.669326 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.669409 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-l6tzc" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.669434 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.669337 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.669711 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.674790 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs"] Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.761481 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-42lvs\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.761533 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-42lvs\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.761576 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-42lvs\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.761624 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-42lvs\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.761757 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-42lvs\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.761893 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96dlb\" (UniqueName: \"kubernetes.io/projected/eb82c6b3-a652-4d30-a8c9-63f6878557cc-kube-api-access-96dlb\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-42lvs\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.761941 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-42lvs\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.863536 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96dlb\" (UniqueName: \"kubernetes.io/projected/eb82c6b3-a652-4d30-a8c9-63f6878557cc-kube-api-access-96dlb\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-42lvs\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.863590 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-42lvs\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.863672 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-42lvs\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 
07:17:01.863693 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-42lvs\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.863720 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-42lvs\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.863750 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-42lvs\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.863773 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-42lvs\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.867822 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-42lvs\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.868205 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-42lvs\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.868210 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-42lvs\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.868727 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-42lvs\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 
07:17:01.869236 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-42lvs\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.869795 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-42lvs\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.879862 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96dlb\" (UniqueName: \"kubernetes.io/projected/eb82c6b3-a652-4d30-a8c9-63f6878557cc-kube-api-access-96dlb\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-42lvs\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" Dec 01 07:17:01 crc kubenswrapper[4632]: I1201 07:17:01.983737 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" Dec 01 07:17:02 crc kubenswrapper[4632]: I1201 07:17:02.438834 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs"] Dec 01 07:17:02 crc kubenswrapper[4632]: I1201 07:17:02.587506 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" event={"ID":"eb82c6b3-a652-4d30-a8c9-63f6878557cc","Type":"ContainerStarted","Data":"a0cbf882f5c61726f90654b3715e15cf07341b6e414de1752d130ac8b5e9bb6f"} Dec 01 07:17:03 crc kubenswrapper[4632]: I1201 07:17:03.596851 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" event={"ID":"eb82c6b3-a652-4d30-a8c9-63f6878557cc","Type":"ContainerStarted","Data":"278fb3052bd950e1e44ef01bd6afe81ff990eb069a13db58bc0804140422570c"} Dec 01 07:17:03 crc kubenswrapper[4632]: I1201 07:17:03.610924 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" podStartSLOduration=2.008666619 podStartE2EDuration="2.610896006s" podCreationTimestamp="2025-12-01 07:17:01 +0000 UTC" firstStartedPulling="2025-12-01 07:17:02.441050101 +0000 UTC m=+2032.006063074" lastFinishedPulling="2025-12-01 07:17:03.043279488 +0000 UTC m=+2032.608292461" observedRunningTime="2025-12-01 07:17:03.609208745 +0000 UTC m=+2033.174221719" watchObservedRunningTime="2025-12-01 07:17:03.610896006 +0000 UTC m=+2033.175908979" Dec 01 07:17:44 crc kubenswrapper[4632]: I1201 07:17:44.397474 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-g9sdt"] Dec 01 07:17:44 crc kubenswrapper[4632]: I1201 07:17:44.399635 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g9sdt" Dec 01 07:17:44 crc kubenswrapper[4632]: I1201 07:17:44.407081 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g9sdt"] Dec 01 07:17:44 crc kubenswrapper[4632]: I1201 07:17:44.464631 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e879f6b-9569-49dd-990c-f441ec728dce-catalog-content\") pod \"redhat-marketplace-g9sdt\" (UID: \"7e879f6b-9569-49dd-990c-f441ec728dce\") " pod="openshift-marketplace/redhat-marketplace-g9sdt" Dec 01 07:17:44 crc kubenswrapper[4632]: I1201 07:17:44.464673 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e879f6b-9569-49dd-990c-f441ec728dce-utilities\") pod \"redhat-marketplace-g9sdt\" (UID: \"7e879f6b-9569-49dd-990c-f441ec728dce\") " pod="openshift-marketplace/redhat-marketplace-g9sdt" Dec 01 07:17:44 crc kubenswrapper[4632]: I1201 07:17:44.464746 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bxxg\" (UniqueName: \"kubernetes.io/projected/7e879f6b-9569-49dd-990c-f441ec728dce-kube-api-access-9bxxg\") pod \"redhat-marketplace-g9sdt\" (UID: \"7e879f6b-9569-49dd-990c-f441ec728dce\") " pod="openshift-marketplace/redhat-marketplace-g9sdt" Dec 01 07:17:44 crc kubenswrapper[4632]: I1201 07:17:44.568165 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e879f6b-9569-49dd-990c-f441ec728dce-catalog-content\") pod \"redhat-marketplace-g9sdt\" (UID: \"7e879f6b-9569-49dd-990c-f441ec728dce\") " pod="openshift-marketplace/redhat-marketplace-g9sdt" Dec 01 07:17:44 crc kubenswrapper[4632]: I1201 07:17:44.568258 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e879f6b-9569-49dd-990c-f441ec728dce-utilities\") pod \"redhat-marketplace-g9sdt\" (UID: \"7e879f6b-9569-49dd-990c-f441ec728dce\") " pod="openshift-marketplace/redhat-marketplace-g9sdt" Dec 01 07:17:44 crc kubenswrapper[4632]: I1201 07:17:44.568409 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bxxg\" (UniqueName: \"kubernetes.io/projected/7e879f6b-9569-49dd-990c-f441ec728dce-kube-api-access-9bxxg\") pod \"redhat-marketplace-g9sdt\" (UID: \"7e879f6b-9569-49dd-990c-f441ec728dce\") " pod="openshift-marketplace/redhat-marketplace-g9sdt" Dec 01 07:17:44 crc kubenswrapper[4632]: I1201 07:17:44.568767 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e879f6b-9569-49dd-990c-f441ec728dce-catalog-content\") pod \"redhat-marketplace-g9sdt\" (UID: \"7e879f6b-9569-49dd-990c-f441ec728dce\") " pod="openshift-marketplace/redhat-marketplace-g9sdt" Dec 01 07:17:44 crc kubenswrapper[4632]: I1201 07:17:44.568794 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e879f6b-9569-49dd-990c-f441ec728dce-utilities\") pod \"redhat-marketplace-g9sdt\" (UID: \"7e879f6b-9569-49dd-990c-f441ec728dce\") " pod="openshift-marketplace/redhat-marketplace-g9sdt" Dec 01 07:17:44 crc kubenswrapper[4632]: I1201 07:17:44.587146 4632 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-9bxxg\" (UniqueName: \"kubernetes.io/projected/7e879f6b-9569-49dd-990c-f441ec728dce-kube-api-access-9bxxg\") pod \"redhat-marketplace-g9sdt\" (UID: \"7e879f6b-9569-49dd-990c-f441ec728dce\") " pod="openshift-marketplace/redhat-marketplace-g9sdt" Dec 01 07:17:44 crc kubenswrapper[4632]: I1201 07:17:44.725704 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g9sdt" Dec 01 07:17:45 crc kubenswrapper[4632]: I1201 07:17:45.148932 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g9sdt"] Dec 01 07:17:45 crc kubenswrapper[4632]: I1201 07:17:45.935121 4632 generic.go:334] "Generic (PLEG): container finished" podID="7e879f6b-9569-49dd-990c-f441ec728dce" containerID="1c1221564d45e2b987e9c77feb11a1eb562f36bca89e1107b4d7533b73866879" exitCode=0 Dec 01 07:17:45 crc kubenswrapper[4632]: I1201 07:17:45.935239 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g9sdt" event={"ID":"7e879f6b-9569-49dd-990c-f441ec728dce","Type":"ContainerDied","Data":"1c1221564d45e2b987e9c77feb11a1eb562f36bca89e1107b4d7533b73866879"} Dec 01 07:17:45 crc kubenswrapper[4632]: I1201 07:17:45.935588 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g9sdt" event={"ID":"7e879f6b-9569-49dd-990c-f441ec728dce","Type":"ContainerStarted","Data":"2e95bcbe8607bad61009034d406907de3ed3c2f610df0c721588d7ba0cb54a40"} Dec 01 07:17:46 crc kubenswrapper[4632]: I1201 07:17:46.944527 4632 generic.go:334] "Generic (PLEG): container finished" podID="7e879f6b-9569-49dd-990c-f441ec728dce" containerID="188a6baa7eb6b104e89973851a96e699f91556605698d634fede8a2b3d101a67" exitCode=0 Dec 01 07:17:46 crc kubenswrapper[4632]: I1201 07:17:46.944717 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g9sdt" event={"ID":"7e879f6b-9569-49dd-990c-f441ec728dce","Type":"ContainerDied","Data":"188a6baa7eb6b104e89973851a96e699f91556605698d634fede8a2b3d101a67"} Dec 01 07:17:47 crc kubenswrapper[4632]: I1201 07:17:47.955038 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g9sdt" event={"ID":"7e879f6b-9569-49dd-990c-f441ec728dce","Type":"ContainerStarted","Data":"4533c70288704d954a4affae7dcb8da278cef05806ad51b59136713858a30c9b"} Dec 01 07:17:47 crc kubenswrapper[4632]: I1201 07:17:47.975584 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-g9sdt" podStartSLOduration=2.397598025 podStartE2EDuration="3.975565052s" podCreationTimestamp="2025-12-01 07:17:44 +0000 UTC" firstStartedPulling="2025-12-01 07:17:45.938096045 +0000 UTC m=+2075.503109018" lastFinishedPulling="2025-12-01 07:17:47.516063072 +0000 UTC m=+2077.081076045" observedRunningTime="2025-12-01 07:17:47.971422193 +0000 UTC m=+2077.536435166" watchObservedRunningTime="2025-12-01 07:17:47.975565052 +0000 UTC m=+2077.540578026" Dec 01 07:17:54 crc kubenswrapper[4632]: I1201 07:17:54.726616 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-g9sdt" Dec 01 07:17:54 crc kubenswrapper[4632]: I1201 07:17:54.727311 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-g9sdt" Dec 01 07:17:54 crc kubenswrapper[4632]: I1201 07:17:54.764278 4632 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-g9sdt" Dec 01 07:17:55 crc kubenswrapper[4632]: I1201 07:17:55.042270 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-g9sdt" Dec 01 07:17:55 crc kubenswrapper[4632]: I1201 07:17:55.081184 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g9sdt"] Dec 01 07:17:57 crc kubenswrapper[4632]: I1201 07:17:57.030801 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-g9sdt" podUID="7e879f6b-9569-49dd-990c-f441ec728dce" containerName="registry-server" containerID="cri-o://4533c70288704d954a4affae7dcb8da278cef05806ad51b59136713858a30c9b" gracePeriod=2 Dec 01 07:17:57 crc kubenswrapper[4632]: I1201 07:17:57.384601 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g9sdt" Dec 01 07:17:57 crc kubenswrapper[4632]: I1201 07:17:57.550136 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9bxxg\" (UniqueName: \"kubernetes.io/projected/7e879f6b-9569-49dd-990c-f441ec728dce-kube-api-access-9bxxg\") pod \"7e879f6b-9569-49dd-990c-f441ec728dce\" (UID: \"7e879f6b-9569-49dd-990c-f441ec728dce\") " Dec 01 07:17:57 crc kubenswrapper[4632]: I1201 07:17:57.550228 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e879f6b-9569-49dd-990c-f441ec728dce-catalog-content\") pod \"7e879f6b-9569-49dd-990c-f441ec728dce\" (UID: \"7e879f6b-9569-49dd-990c-f441ec728dce\") " Dec 01 07:17:57 crc kubenswrapper[4632]: I1201 07:17:57.550400 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e879f6b-9569-49dd-990c-f441ec728dce-utilities\") pod \"7e879f6b-9569-49dd-990c-f441ec728dce\" (UID: \"7e879f6b-9569-49dd-990c-f441ec728dce\") " Dec 01 07:17:57 crc kubenswrapper[4632]: I1201 07:17:57.551071 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e879f6b-9569-49dd-990c-f441ec728dce-utilities" (OuterVolumeSpecName: "utilities") pod "7e879f6b-9569-49dd-990c-f441ec728dce" (UID: "7e879f6b-9569-49dd-990c-f441ec728dce"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:17:57 crc kubenswrapper[4632]: I1201 07:17:57.555721 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e879f6b-9569-49dd-990c-f441ec728dce-kube-api-access-9bxxg" (OuterVolumeSpecName: "kube-api-access-9bxxg") pod "7e879f6b-9569-49dd-990c-f441ec728dce" (UID: "7e879f6b-9569-49dd-990c-f441ec728dce"). InnerVolumeSpecName "kube-api-access-9bxxg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:17:57 crc kubenswrapper[4632]: I1201 07:17:57.563782 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e879f6b-9569-49dd-990c-f441ec728dce-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7e879f6b-9569-49dd-990c-f441ec728dce" (UID: "7e879f6b-9569-49dd-990c-f441ec728dce"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:17:57 crc kubenswrapper[4632]: I1201 07:17:57.652417 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7e879f6b-9569-49dd-990c-f441ec728dce-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:17:57 crc kubenswrapper[4632]: I1201 07:17:57.652445 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9bxxg\" (UniqueName: \"kubernetes.io/projected/7e879f6b-9569-49dd-990c-f441ec728dce-kube-api-access-9bxxg\") on node \"crc\" DevicePath \"\"" Dec 01 07:17:57 crc kubenswrapper[4632]: I1201 07:17:57.652455 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7e879f6b-9569-49dd-990c-f441ec728dce-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:17:58 crc kubenswrapper[4632]: I1201 07:17:58.040004 4632 generic.go:334] "Generic (PLEG): container finished" podID="7e879f6b-9569-49dd-990c-f441ec728dce" containerID="4533c70288704d954a4affae7dcb8da278cef05806ad51b59136713858a30c9b" exitCode=0 Dec 01 07:17:58 crc kubenswrapper[4632]: I1201 07:17:58.040048 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g9sdt" event={"ID":"7e879f6b-9569-49dd-990c-f441ec728dce","Type":"ContainerDied","Data":"4533c70288704d954a4affae7dcb8da278cef05806ad51b59136713858a30c9b"} Dec 01 07:17:58 crc kubenswrapper[4632]: I1201 07:17:58.040082 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g9sdt" event={"ID":"7e879f6b-9569-49dd-990c-f441ec728dce","Type":"ContainerDied","Data":"2e95bcbe8607bad61009034d406907de3ed3c2f610df0c721588d7ba0cb54a40"} Dec 01 07:17:58 crc kubenswrapper[4632]: I1201 07:17:58.040099 4632 scope.go:117] "RemoveContainer" containerID="4533c70288704d954a4affae7dcb8da278cef05806ad51b59136713858a30c9b" Dec 01 07:17:58 crc kubenswrapper[4632]: I1201 07:17:58.040189 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g9sdt" Dec 01 07:17:58 crc kubenswrapper[4632]: I1201 07:17:58.064028 4632 scope.go:117] "RemoveContainer" containerID="188a6baa7eb6b104e89973851a96e699f91556605698d634fede8a2b3d101a67" Dec 01 07:17:58 crc kubenswrapper[4632]: I1201 07:17:58.074342 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g9sdt"] Dec 01 07:17:58 crc kubenswrapper[4632]: I1201 07:17:58.084186 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-g9sdt"] Dec 01 07:17:58 crc kubenswrapper[4632]: I1201 07:17:58.093193 4632 scope.go:117] "RemoveContainer" containerID="1c1221564d45e2b987e9c77feb11a1eb562f36bca89e1107b4d7533b73866879" Dec 01 07:17:58 crc kubenswrapper[4632]: I1201 07:17:58.122079 4632 scope.go:117] "RemoveContainer" containerID="4533c70288704d954a4affae7dcb8da278cef05806ad51b59136713858a30c9b" Dec 01 07:17:58 crc kubenswrapper[4632]: E1201 07:17:58.122531 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4533c70288704d954a4affae7dcb8da278cef05806ad51b59136713858a30c9b\": container with ID starting with 4533c70288704d954a4affae7dcb8da278cef05806ad51b59136713858a30c9b not found: ID does not exist" containerID="4533c70288704d954a4affae7dcb8da278cef05806ad51b59136713858a30c9b" Dec 01 07:17:58 crc kubenswrapper[4632]: I1201 07:17:58.122567 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4533c70288704d954a4affae7dcb8da278cef05806ad51b59136713858a30c9b"} err="failed to get container status \"4533c70288704d954a4affae7dcb8da278cef05806ad51b59136713858a30c9b\": rpc error: code = NotFound desc = could not find container \"4533c70288704d954a4affae7dcb8da278cef05806ad51b59136713858a30c9b\": container with ID starting with 4533c70288704d954a4affae7dcb8da278cef05806ad51b59136713858a30c9b not found: ID does not exist" Dec 01 07:17:58 crc kubenswrapper[4632]: I1201 07:17:58.122587 4632 scope.go:117] "RemoveContainer" containerID="188a6baa7eb6b104e89973851a96e699f91556605698d634fede8a2b3d101a67" Dec 01 07:17:58 crc kubenswrapper[4632]: E1201 07:17:58.122868 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"188a6baa7eb6b104e89973851a96e699f91556605698d634fede8a2b3d101a67\": container with ID starting with 188a6baa7eb6b104e89973851a96e699f91556605698d634fede8a2b3d101a67 not found: ID does not exist" containerID="188a6baa7eb6b104e89973851a96e699f91556605698d634fede8a2b3d101a67" Dec 01 07:17:58 crc kubenswrapper[4632]: I1201 07:17:58.122892 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"188a6baa7eb6b104e89973851a96e699f91556605698d634fede8a2b3d101a67"} err="failed to get container status \"188a6baa7eb6b104e89973851a96e699f91556605698d634fede8a2b3d101a67\": rpc error: code = NotFound desc = could not find container \"188a6baa7eb6b104e89973851a96e699f91556605698d634fede8a2b3d101a67\": container with ID starting with 188a6baa7eb6b104e89973851a96e699f91556605698d634fede8a2b3d101a67 not found: ID does not exist" Dec 01 07:17:58 crc kubenswrapper[4632]: I1201 07:17:58.122908 4632 scope.go:117] "RemoveContainer" containerID="1c1221564d45e2b987e9c77feb11a1eb562f36bca89e1107b4d7533b73866879" Dec 01 07:17:58 crc kubenswrapper[4632]: E1201 07:17:58.123172 4632 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"1c1221564d45e2b987e9c77feb11a1eb562f36bca89e1107b4d7533b73866879\": container with ID starting with 1c1221564d45e2b987e9c77feb11a1eb562f36bca89e1107b4d7533b73866879 not found: ID does not exist" containerID="1c1221564d45e2b987e9c77feb11a1eb562f36bca89e1107b4d7533b73866879" Dec 01 07:17:58 crc kubenswrapper[4632]: I1201 07:17:58.123193 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c1221564d45e2b987e9c77feb11a1eb562f36bca89e1107b4d7533b73866879"} err="failed to get container status \"1c1221564d45e2b987e9c77feb11a1eb562f36bca89e1107b4d7533b73866879\": rpc error: code = NotFound desc = could not find container \"1c1221564d45e2b987e9c77feb11a1eb562f36bca89e1107b4d7533b73866879\": container with ID starting with 1c1221564d45e2b987e9c77feb11a1eb562f36bca89e1107b4d7533b73866879 not found: ID does not exist" Dec 01 07:17:58 crc kubenswrapper[4632]: I1201 07:17:58.758802 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e879f6b-9569-49dd-990c-f441ec728dce" path="/var/lib/kubelet/pods/7e879f6b-9569-49dd-990c-f441ec728dce/volumes" Dec 01 07:18:37 crc kubenswrapper[4632]: I1201 07:18:37.337838 4632 generic.go:334] "Generic (PLEG): container finished" podID="eb82c6b3-a652-4d30-a8c9-63f6878557cc" containerID="278fb3052bd950e1e44ef01bd6afe81ff990eb069a13db58bc0804140422570c" exitCode=0 Dec 01 07:18:37 crc kubenswrapper[4632]: I1201 07:18:37.337919 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" event={"ID":"eb82c6b3-a652-4d30-a8c9-63f6878557cc","Type":"ContainerDied","Data":"278fb3052bd950e1e44ef01bd6afe81ff990eb069a13db58bc0804140422570c"} Dec 01 07:18:38 crc kubenswrapper[4632]: I1201 07:18:38.644124 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" Dec 01 07:18:38 crc kubenswrapper[4632]: I1201 07:18:38.799764 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-ssh-key\") pod \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " Dec 01 07:18:38 crc kubenswrapper[4632]: I1201 07:18:38.799843 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96dlb\" (UniqueName: \"kubernetes.io/projected/eb82c6b3-a652-4d30-a8c9-63f6878557cc-kube-api-access-96dlb\") pod \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " Dec 01 07:18:38 crc kubenswrapper[4632]: I1201 07:18:38.799981 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-ceilometer-compute-config-data-1\") pod \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " Dec 01 07:18:38 crc kubenswrapper[4632]: I1201 07:18:38.800005 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-inventory\") pod \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " Dec 01 07:18:38 crc kubenswrapper[4632]: I1201 07:18:38.800061 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-ceilometer-compute-config-data-2\") pod \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " Dec 01 07:18:38 crc kubenswrapper[4632]: I1201 07:18:38.800095 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-ceilometer-compute-config-data-0\") pod \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " Dec 01 07:18:38 crc kubenswrapper[4632]: I1201 07:18:38.800112 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-telemetry-combined-ca-bundle\") pod \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\" (UID: \"eb82c6b3-a652-4d30-a8c9-63f6878557cc\") " Dec 01 07:18:38 crc kubenswrapper[4632]: I1201 07:18:38.809339 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb82c6b3-a652-4d30-a8c9-63f6878557cc-kube-api-access-96dlb" (OuterVolumeSpecName: "kube-api-access-96dlb") pod "eb82c6b3-a652-4d30-a8c9-63f6878557cc" (UID: "eb82c6b3-a652-4d30-a8c9-63f6878557cc"). InnerVolumeSpecName "kube-api-access-96dlb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:18:38 crc kubenswrapper[4632]: I1201 07:18:38.809699 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "eb82c6b3-a652-4d30-a8c9-63f6878557cc" (UID: "eb82c6b3-a652-4d30-a8c9-63f6878557cc"). 
InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:18:38 crc kubenswrapper[4632]: I1201 07:18:38.821555 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "eb82c6b3-a652-4d30-a8c9-63f6878557cc" (UID: "eb82c6b3-a652-4d30-a8c9-63f6878557cc"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:18:38 crc kubenswrapper[4632]: I1201 07:18:38.822907 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-inventory" (OuterVolumeSpecName: "inventory") pod "eb82c6b3-a652-4d30-a8c9-63f6878557cc" (UID: "eb82c6b3-a652-4d30-a8c9-63f6878557cc"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:18:38 crc kubenswrapper[4632]: I1201 07:18:38.823246 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "eb82c6b3-a652-4d30-a8c9-63f6878557cc" (UID: "eb82c6b3-a652-4d30-a8c9-63f6878557cc"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:18:38 crc kubenswrapper[4632]: I1201 07:18:38.824116 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "eb82c6b3-a652-4d30-a8c9-63f6878557cc" (UID: "eb82c6b3-a652-4d30-a8c9-63f6878557cc"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:18:38 crc kubenswrapper[4632]: I1201 07:18:38.828710 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "eb82c6b3-a652-4d30-a8c9-63f6878557cc" (UID: "eb82c6b3-a652-4d30-a8c9-63f6878557cc"). InnerVolumeSpecName "ceilometer-compute-config-data-2". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:18:38 crc kubenswrapper[4632]: I1201 07:18:38.902432 4632 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 01 07:18:38 crc kubenswrapper[4632]: I1201 07:18:38.902462 4632 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-inventory\") on node \"crc\" DevicePath \"\"" Dec 01 07:18:38 crc kubenswrapper[4632]: I1201 07:18:38.902472 4632 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Dec 01 07:18:38 crc kubenswrapper[4632]: I1201 07:18:38.902483 4632 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 01 07:18:38 crc kubenswrapper[4632]: I1201 07:18:38.902492 4632 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 01 07:18:38 crc kubenswrapper[4632]: I1201 07:18:38.902500 4632 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eb82c6b3-a652-4d30-a8c9-63f6878557cc-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 07:18:38 crc kubenswrapper[4632]: I1201 07:18:38.902508 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96dlb\" (UniqueName: \"kubernetes.io/projected/eb82c6b3-a652-4d30-a8c9-63f6878557cc-kube-api-access-96dlb\") on node \"crc\" DevicePath \"\"" Dec 01 07:18:39 crc kubenswrapper[4632]: I1201 07:18:39.352837 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" event={"ID":"eb82c6b3-a652-4d30-a8c9-63f6878557cc","Type":"ContainerDied","Data":"a0cbf882f5c61726f90654b3715e15cf07341b6e414de1752d130ac8b5e9bb6f"} Dec 01 07:18:39 crc kubenswrapper[4632]: I1201 07:18:39.353082 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-42lvs" Dec 01 07:18:39 crc kubenswrapper[4632]: I1201 07:18:39.353101 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a0cbf882f5c61726f90654b3715e15cf07341b6e414de1752d130ac8b5e9bb6f" Dec 01 07:18:49 crc kubenswrapper[4632]: I1201 07:18:49.498199 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:18:49 crc kubenswrapper[4632]: I1201 07:18:49.498909 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:19:11 crc kubenswrapper[4632]: I1201 07:19:11.866795 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Dec 01 07:19:11 crc kubenswrapper[4632]: E1201 07:19:11.867860 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb82c6b3-a652-4d30-a8c9-63f6878557cc" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 01 07:19:11 crc kubenswrapper[4632]: I1201 07:19:11.867876 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb82c6b3-a652-4d30-a8c9-63f6878557cc" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 01 07:19:11 crc kubenswrapper[4632]: E1201 07:19:11.867905 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e879f6b-9569-49dd-990c-f441ec728dce" containerName="extract-utilities" Dec 01 07:19:11 crc kubenswrapper[4632]: I1201 07:19:11.867911 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e879f6b-9569-49dd-990c-f441ec728dce" containerName="extract-utilities" Dec 01 07:19:11 crc kubenswrapper[4632]: E1201 07:19:11.867928 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e879f6b-9569-49dd-990c-f441ec728dce" containerName="extract-content" Dec 01 07:19:11 crc kubenswrapper[4632]: I1201 07:19:11.867937 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e879f6b-9569-49dd-990c-f441ec728dce" containerName="extract-content" Dec 01 07:19:11 crc kubenswrapper[4632]: E1201 07:19:11.867955 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e879f6b-9569-49dd-990c-f441ec728dce" containerName="registry-server" Dec 01 07:19:11 crc kubenswrapper[4632]: I1201 07:19:11.867961 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e879f6b-9569-49dd-990c-f441ec728dce" containerName="registry-server" Dec 01 07:19:11 crc kubenswrapper[4632]: I1201 07:19:11.868190 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb82c6b3-a652-4d30-a8c9-63f6878557cc" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 01 07:19:11 crc kubenswrapper[4632]: I1201 07:19:11.868211 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e879f6b-9569-49dd-990c-f441ec728dce" containerName="registry-server" Dec 01 07:19:11 crc kubenswrapper[4632]: I1201 07:19:11.869026 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 01 07:19:11 crc kubenswrapper[4632]: I1201 07:19:11.872636 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-xhq49" Dec 01 07:19:11 crc kubenswrapper[4632]: I1201 07:19:11.872838 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Dec 01 07:19:11 crc kubenswrapper[4632]: I1201 07:19:11.873025 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 01 07:19:11 crc kubenswrapper[4632]: I1201 07:19:11.873618 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Dec 01 07:19:11 crc kubenswrapper[4632]: I1201 07:19:11.880035 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.063306 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/713aa788-d673-4113-93f4-760c3d3714cc-config-data\") pod \"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.063383 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/713aa788-d673-4113-93f4-760c3d3714cc-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.063415 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/713aa788-d673-4113-93f4-760c3d3714cc-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.063440 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/713aa788-d673-4113-93f4-760c3d3714cc-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.063462 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/713aa788-d673-4113-93f4-760c3d3714cc-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.063529 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/713aa788-d673-4113-93f4-760c3d3714cc-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.063558 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: 
\"kubernetes.io/empty-dir/713aa788-d673-4113-93f4-760c3d3714cc-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.063583 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5slj\" (UniqueName: \"kubernetes.io/projected/713aa788-d673-4113-93f4-760c3d3714cc-kube-api-access-b5slj\") pod \"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.063603 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.164971 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/713aa788-d673-4113-93f4-760c3d3714cc-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.165025 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/713aa788-d673-4113-93f4-760c3d3714cc-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.165064 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5slj\" (UniqueName: \"kubernetes.io/projected/713aa788-d673-4113-93f4-760c3d3714cc-kube-api-access-b5slj\") pod \"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.165084 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.165127 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/713aa788-d673-4113-93f4-760c3d3714cc-config-data\") pod \"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.165169 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/713aa788-d673-4113-93f4-760c3d3714cc-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.165192 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/713aa788-d673-4113-93f4-760c3d3714cc-openstack-config\") pod 
\"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.165211 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/713aa788-d673-4113-93f4-760c3d3714cc-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.165235 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/713aa788-d673-4113-93f4-760c3d3714cc-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.165909 4632 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.166165 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/713aa788-d673-4113-93f4-760c3d3714cc-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.166608 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/713aa788-d673-4113-93f4-760c3d3714cc-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.166612 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/713aa788-d673-4113-93f4-760c3d3714cc-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.167665 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/713aa788-d673-4113-93f4-760c3d3714cc-config-data\") pod \"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.171773 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/713aa788-d673-4113-93f4-760c3d3714cc-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.172272 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/713aa788-d673-4113-93f4-760c3d3714cc-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: 
I1201 07:19:12.172662 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/713aa788-d673-4113-93f4-760c3d3714cc-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.179077 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5slj\" (UniqueName: \"kubernetes.io/projected/713aa788-d673-4113-93f4-760c3d3714cc-kube-api-access-b5slj\") pod \"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.186020 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.484585 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 01 07:19:12 crc kubenswrapper[4632]: I1201 07:19:12.870706 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 01 07:19:12 crc kubenswrapper[4632]: W1201 07:19:12.872898 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod713aa788_d673_4113_93f4_760c3d3714cc.slice/crio-65b1ecdbb1595eeccbaee7d3195b88b05ea241b60b897239efc72c725755c180 WatchSource:0}: Error finding container 65b1ecdbb1595eeccbaee7d3195b88b05ea241b60b897239efc72c725755c180: Status 404 returned error can't find the container with id 65b1ecdbb1595eeccbaee7d3195b88b05ea241b60b897239efc72c725755c180 Dec 01 07:19:13 crc kubenswrapper[4632]: I1201 07:19:13.592187 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"713aa788-d673-4113-93f4-760c3d3714cc","Type":"ContainerStarted","Data":"65b1ecdbb1595eeccbaee7d3195b88b05ea241b60b897239efc72c725755c180"} Dec 01 07:19:19 crc kubenswrapper[4632]: I1201 07:19:19.498264 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:19:19 crc kubenswrapper[4632]: I1201 07:19:19.498857 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:19:41 crc kubenswrapper[4632]: I1201 07:19:41.481184 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-8rcf4"] Dec 01 07:19:41 crc kubenswrapper[4632]: I1201 07:19:41.483751 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8rcf4" Dec 01 07:19:41 crc kubenswrapper[4632]: I1201 07:19:41.493898 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8rcf4"] Dec 01 07:19:41 crc kubenswrapper[4632]: I1201 07:19:41.507676 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b80c025-b51c-4fb4-a768-a1731c31ce87-catalog-content\") pod \"redhat-operators-8rcf4\" (UID: \"1b80c025-b51c-4fb4-a768-a1731c31ce87\") " pod="openshift-marketplace/redhat-operators-8rcf4" Dec 01 07:19:41 crc kubenswrapper[4632]: I1201 07:19:41.507794 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b80c025-b51c-4fb4-a768-a1731c31ce87-utilities\") pod \"redhat-operators-8rcf4\" (UID: \"1b80c025-b51c-4fb4-a768-a1731c31ce87\") " pod="openshift-marketplace/redhat-operators-8rcf4" Dec 01 07:19:41 crc kubenswrapper[4632]: I1201 07:19:41.507863 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f45sp\" (UniqueName: \"kubernetes.io/projected/1b80c025-b51c-4fb4-a768-a1731c31ce87-kube-api-access-f45sp\") pod \"redhat-operators-8rcf4\" (UID: \"1b80c025-b51c-4fb4-a768-a1731c31ce87\") " pod="openshift-marketplace/redhat-operators-8rcf4" Dec 01 07:19:41 crc kubenswrapper[4632]: I1201 07:19:41.609690 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b80c025-b51c-4fb4-a768-a1731c31ce87-catalog-content\") pod \"redhat-operators-8rcf4\" (UID: \"1b80c025-b51c-4fb4-a768-a1731c31ce87\") " pod="openshift-marketplace/redhat-operators-8rcf4" Dec 01 07:19:41 crc kubenswrapper[4632]: I1201 07:19:41.609788 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b80c025-b51c-4fb4-a768-a1731c31ce87-utilities\") pod \"redhat-operators-8rcf4\" (UID: \"1b80c025-b51c-4fb4-a768-a1731c31ce87\") " pod="openshift-marketplace/redhat-operators-8rcf4" Dec 01 07:19:41 crc kubenswrapper[4632]: I1201 07:19:41.609818 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f45sp\" (UniqueName: \"kubernetes.io/projected/1b80c025-b51c-4fb4-a768-a1731c31ce87-kube-api-access-f45sp\") pod \"redhat-operators-8rcf4\" (UID: \"1b80c025-b51c-4fb4-a768-a1731c31ce87\") " pod="openshift-marketplace/redhat-operators-8rcf4" Dec 01 07:19:41 crc kubenswrapper[4632]: I1201 07:19:41.610142 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b80c025-b51c-4fb4-a768-a1731c31ce87-catalog-content\") pod \"redhat-operators-8rcf4\" (UID: \"1b80c025-b51c-4fb4-a768-a1731c31ce87\") " pod="openshift-marketplace/redhat-operators-8rcf4" Dec 01 07:19:41 crc kubenswrapper[4632]: I1201 07:19:41.610220 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b80c025-b51c-4fb4-a768-a1731c31ce87-utilities\") pod \"redhat-operators-8rcf4\" (UID: \"1b80c025-b51c-4fb4-a768-a1731c31ce87\") " pod="openshift-marketplace/redhat-operators-8rcf4" Dec 01 07:19:41 crc kubenswrapper[4632]: I1201 07:19:41.628413 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-f45sp\" (UniqueName: \"kubernetes.io/projected/1b80c025-b51c-4fb4-a768-a1731c31ce87-kube-api-access-f45sp\") pod \"redhat-operators-8rcf4\" (UID: \"1b80c025-b51c-4fb4-a768-a1731c31ce87\") " pod="openshift-marketplace/redhat-operators-8rcf4" Dec 01 07:19:41 crc kubenswrapper[4632]: I1201 07:19:41.810477 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8rcf4" Dec 01 07:19:42 crc kubenswrapper[4632]: I1201 07:19:42.245365 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8rcf4"] Dec 01 07:19:49 crc kubenswrapper[4632]: I1201 07:19:49.498008 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:19:49 crc kubenswrapper[4632]: I1201 07:19:49.498698 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:19:49 crc kubenswrapper[4632]: I1201 07:19:49.498749 4632 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" Dec 01 07:19:49 crc kubenswrapper[4632]: I1201 07:19:49.499311 4632 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b"} pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 07:19:49 crc kubenswrapper[4632]: I1201 07:19:49.499379 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" containerID="cri-o://d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b" gracePeriod=600 Dec 01 07:19:49 crc kubenswrapper[4632]: I1201 07:19:49.878834 4632 generic.go:334] "Generic (PLEG): container finished" podID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerID="d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b" exitCode=0 Dec 01 07:19:49 crc kubenswrapper[4632]: I1201 07:19:49.878919 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerDied","Data":"d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b"} Dec 01 07:19:49 crc kubenswrapper[4632]: I1201 07:19:49.878986 4632 scope.go:117] "RemoveContainer" containerID="0037ec22086b1f59aa504947ce2ddb7890c5d4ea1ee28fcc11fcf7a92578eb2f" Dec 01 07:19:55 crc kubenswrapper[4632]: E1201 07:19:55.419804 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:19:55 crc kubenswrapper[4632]: E1201 07:19:55.465652 4632 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-tempest-all:fa2bb8efef6782c26ea7f1675eeb36dd" Dec 01 07:19:55 crc kubenswrapper[4632]: E1201 07:19:55.465893 4632 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-antelope-centos9/openstack-tempest-all:fa2bb8efef6782c26ea7f1675eeb36dd" Dec 01 07:19:55 crc kubenswrapper[4632]: E1201 07:19:55.466010 4632 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.rdoproject.org/podified-antelope-centos9/openstack-tempest-all:fa2bb8efef6782c26ea7f1675eeb36dd,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-b5slj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFro
mSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(713aa788-d673-4113-93f4-760c3d3714cc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 01 07:19:55 crc kubenswrapper[4632]: E1201 07:19:55.467879 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="713aa788-d673-4113-93f4-760c3d3714cc" Dec 01 07:19:55 crc kubenswrapper[4632]: I1201 07:19:55.928682 4632 scope.go:117] "RemoveContainer" containerID="d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b" Dec 01 07:19:55 crc kubenswrapper[4632]: E1201 07:19:55.928984 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:19:55 crc kubenswrapper[4632]: I1201 07:19:55.929599 4632 generic.go:334] "Generic (PLEG): container finished" podID="1b80c025-b51c-4fb4-a768-a1731c31ce87" containerID="08a3ad8b5685e2377d5b3b0530c1ddf76860b43903f2c5ece194a4c8ad9efd90" exitCode=0 Dec 01 07:19:55 crc kubenswrapper[4632]: I1201 07:19:55.929625 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8rcf4" event={"ID":"1b80c025-b51c-4fb4-a768-a1731c31ce87","Type":"ContainerDied","Data":"08a3ad8b5685e2377d5b3b0530c1ddf76860b43903f2c5ece194a4c8ad9efd90"} Dec 01 07:19:55 crc kubenswrapper[4632]: I1201 07:19:55.929661 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8rcf4" event={"ID":"1b80c025-b51c-4fb4-a768-a1731c31ce87","Type":"ContainerStarted","Data":"73637e2f3a6a1c4cd4e6d343648cf17586b6887b578499a8900a114be304271f"} Dec 01 07:19:55 crc kubenswrapper[4632]: E1201 07:19:55.930670 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-antelope-centos9/openstack-tempest-all:fa2bb8efef6782c26ea7f1675eeb36dd\\\"\"" pod="openstack/tempest-tests-tempest" podUID="713aa788-d673-4113-93f4-760c3d3714cc" Dec 01 07:19:57 crc kubenswrapper[4632]: I1201 07:19:57.947184 4632 generic.go:334] "Generic (PLEG): container finished" podID="1b80c025-b51c-4fb4-a768-a1731c31ce87" containerID="945b4b65dfe8e954447c4b2a904e1bc89e1f38afff3ba9a1b1350a91e6136a1e" exitCode=0 Dec 01 07:19:57 crc kubenswrapper[4632]: I1201 07:19:57.947249 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8rcf4" event={"ID":"1b80c025-b51c-4fb4-a768-a1731c31ce87","Type":"ContainerDied","Data":"945b4b65dfe8e954447c4b2a904e1bc89e1f38afff3ba9a1b1350a91e6136a1e"} Dec 01 07:19:58 crc kubenswrapper[4632]: I1201 07:19:58.971956 4632 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-marketplace/redhat-operators-8rcf4" event={"ID":"1b80c025-b51c-4fb4-a768-a1731c31ce87","Type":"ContainerStarted","Data":"1ac0768b97567c3034fd8f93bc4cf3c0f65a3dbbef1f7e3e690a6acfad0b74fe"} Dec 01 07:19:58 crc kubenswrapper[4632]: I1201 07:19:58.992068 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-8rcf4" podStartSLOduration=15.428726348 podStartE2EDuration="17.992054133s" podCreationTimestamp="2025-12-01 07:19:41 +0000 UTC" firstStartedPulling="2025-12-01 07:19:55.930732922 +0000 UTC m=+2205.495745895" lastFinishedPulling="2025-12-01 07:19:58.494060706 +0000 UTC m=+2208.059073680" observedRunningTime="2025-12-01 07:19:58.989197463 +0000 UTC m=+2208.554210436" watchObservedRunningTime="2025-12-01 07:19:58.992054133 +0000 UTC m=+2208.557067106" Dec 01 07:20:01 crc kubenswrapper[4632]: I1201 07:20:01.811450 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-8rcf4" Dec 01 07:20:01 crc kubenswrapper[4632]: I1201 07:20:01.811648 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-8rcf4" Dec 01 07:20:02 crc kubenswrapper[4632]: I1201 07:20:02.845304 4632 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-8rcf4" podUID="1b80c025-b51c-4fb4-a768-a1731c31ce87" containerName="registry-server" probeResult="failure" output=< Dec 01 07:20:02 crc kubenswrapper[4632]: timeout: failed to connect service ":50051" within 1s Dec 01 07:20:02 crc kubenswrapper[4632]: > Dec 01 07:20:08 crc kubenswrapper[4632]: I1201 07:20:08.751073 4632 scope.go:117] "RemoveContainer" containerID="d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b" Dec 01 07:20:08 crc kubenswrapper[4632]: E1201 07:20:08.751814 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:20:11 crc kubenswrapper[4632]: I1201 07:20:11.056709 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"713aa788-d673-4113-93f4-760c3d3714cc","Type":"ContainerStarted","Data":"6b128aa041833ad6e08678378ca455f37f6e0b192318c6b29d7015ce8d489333"} Dec 01 07:20:11 crc kubenswrapper[4632]: I1201 07:20:11.074713 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=4.567703324 podStartE2EDuration="1m1.074698688s" podCreationTimestamp="2025-12-01 07:19:10 +0000 UTC" firstStartedPulling="2025-12-01 07:19:12.876491245 +0000 UTC m=+2162.441504218" lastFinishedPulling="2025-12-01 07:20:09.38348661 +0000 UTC m=+2218.948499582" observedRunningTime="2025-12-01 07:20:11.070424204 +0000 UTC m=+2220.635437178" watchObservedRunningTime="2025-12-01 07:20:11.074698688 +0000 UTC m=+2220.639711662" Dec 01 07:20:11 crc kubenswrapper[4632]: I1201 07:20:11.866072 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-8rcf4" Dec 01 07:20:11 crc kubenswrapper[4632]: I1201 07:20:11.906803 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-marketplace/redhat-operators-8rcf4" Dec 01 07:20:12 crc kubenswrapper[4632]: I1201 07:20:12.667433 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8rcf4"] Dec 01 07:20:13 crc kubenswrapper[4632]: I1201 07:20:13.073299 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-8rcf4" podUID="1b80c025-b51c-4fb4-a768-a1731c31ce87" containerName="registry-server" containerID="cri-o://1ac0768b97567c3034fd8f93bc4cf3c0f65a3dbbef1f7e3e690a6acfad0b74fe" gracePeriod=2 Dec 01 07:20:13 crc kubenswrapper[4632]: I1201 07:20:13.452334 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8rcf4" Dec 01 07:20:13 crc kubenswrapper[4632]: I1201 07:20:13.604233 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b80c025-b51c-4fb4-a768-a1731c31ce87-catalog-content\") pod \"1b80c025-b51c-4fb4-a768-a1731c31ce87\" (UID: \"1b80c025-b51c-4fb4-a768-a1731c31ce87\") " Dec 01 07:20:13 crc kubenswrapper[4632]: I1201 07:20:13.604299 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b80c025-b51c-4fb4-a768-a1731c31ce87-utilities\") pod \"1b80c025-b51c-4fb4-a768-a1731c31ce87\" (UID: \"1b80c025-b51c-4fb4-a768-a1731c31ce87\") " Dec 01 07:20:13 crc kubenswrapper[4632]: I1201 07:20:13.604425 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f45sp\" (UniqueName: \"kubernetes.io/projected/1b80c025-b51c-4fb4-a768-a1731c31ce87-kube-api-access-f45sp\") pod \"1b80c025-b51c-4fb4-a768-a1731c31ce87\" (UID: \"1b80c025-b51c-4fb4-a768-a1731c31ce87\") " Dec 01 07:20:13 crc kubenswrapper[4632]: I1201 07:20:13.605254 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b80c025-b51c-4fb4-a768-a1731c31ce87-utilities" (OuterVolumeSpecName: "utilities") pod "1b80c025-b51c-4fb4-a768-a1731c31ce87" (UID: "1b80c025-b51c-4fb4-a768-a1731c31ce87"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:20:13 crc kubenswrapper[4632]: I1201 07:20:13.610595 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b80c025-b51c-4fb4-a768-a1731c31ce87-kube-api-access-f45sp" (OuterVolumeSpecName: "kube-api-access-f45sp") pod "1b80c025-b51c-4fb4-a768-a1731c31ce87" (UID: "1b80c025-b51c-4fb4-a768-a1731c31ce87"). InnerVolumeSpecName "kube-api-access-f45sp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:20:13 crc kubenswrapper[4632]: I1201 07:20:13.685222 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1b80c025-b51c-4fb4-a768-a1731c31ce87-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1b80c025-b51c-4fb4-a768-a1731c31ce87" (UID: "1b80c025-b51c-4fb4-a768-a1731c31ce87"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:20:13 crc kubenswrapper[4632]: I1201 07:20:13.706379 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1b80c025-b51c-4fb4-a768-a1731c31ce87-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:20:13 crc kubenswrapper[4632]: I1201 07:20:13.706407 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1b80c025-b51c-4fb4-a768-a1731c31ce87-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:20:13 crc kubenswrapper[4632]: I1201 07:20:13.706417 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f45sp\" (UniqueName: \"kubernetes.io/projected/1b80c025-b51c-4fb4-a768-a1731c31ce87-kube-api-access-f45sp\") on node \"crc\" DevicePath \"\"" Dec 01 07:20:14 crc kubenswrapper[4632]: I1201 07:20:14.085578 4632 generic.go:334] "Generic (PLEG): container finished" podID="1b80c025-b51c-4fb4-a768-a1731c31ce87" containerID="1ac0768b97567c3034fd8f93bc4cf3c0f65a3dbbef1f7e3e690a6acfad0b74fe" exitCode=0 Dec 01 07:20:14 crc kubenswrapper[4632]: I1201 07:20:14.085629 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8rcf4" event={"ID":"1b80c025-b51c-4fb4-a768-a1731c31ce87","Type":"ContainerDied","Data":"1ac0768b97567c3034fd8f93bc4cf3c0f65a3dbbef1f7e3e690a6acfad0b74fe"} Dec 01 07:20:14 crc kubenswrapper[4632]: I1201 07:20:14.085849 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8rcf4" event={"ID":"1b80c025-b51c-4fb4-a768-a1731c31ce87","Type":"ContainerDied","Data":"73637e2f3a6a1c4cd4e6d343648cf17586b6887b578499a8900a114be304271f"} Dec 01 07:20:14 crc kubenswrapper[4632]: I1201 07:20:14.085876 4632 scope.go:117] "RemoveContainer" containerID="1ac0768b97567c3034fd8f93bc4cf3c0f65a3dbbef1f7e3e690a6acfad0b74fe" Dec 01 07:20:14 crc kubenswrapper[4632]: I1201 07:20:14.085675 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8rcf4" Dec 01 07:20:14 crc kubenswrapper[4632]: I1201 07:20:14.111131 4632 scope.go:117] "RemoveContainer" containerID="945b4b65dfe8e954447c4b2a904e1bc89e1f38afff3ba9a1b1350a91e6136a1e" Dec 01 07:20:14 crc kubenswrapper[4632]: I1201 07:20:14.117343 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8rcf4"] Dec 01 07:20:14 crc kubenswrapper[4632]: I1201 07:20:14.123161 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-8rcf4"] Dec 01 07:20:14 crc kubenswrapper[4632]: I1201 07:20:14.146719 4632 scope.go:117] "RemoveContainer" containerID="08a3ad8b5685e2377d5b3b0530c1ddf76860b43903f2c5ece194a4c8ad9efd90" Dec 01 07:20:14 crc kubenswrapper[4632]: I1201 07:20:14.166704 4632 scope.go:117] "RemoveContainer" containerID="1ac0768b97567c3034fd8f93bc4cf3c0f65a3dbbef1f7e3e690a6acfad0b74fe" Dec 01 07:20:14 crc kubenswrapper[4632]: E1201 07:20:14.167043 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ac0768b97567c3034fd8f93bc4cf3c0f65a3dbbef1f7e3e690a6acfad0b74fe\": container with ID starting with 1ac0768b97567c3034fd8f93bc4cf3c0f65a3dbbef1f7e3e690a6acfad0b74fe not found: ID does not exist" containerID="1ac0768b97567c3034fd8f93bc4cf3c0f65a3dbbef1f7e3e690a6acfad0b74fe" Dec 01 07:20:14 crc kubenswrapper[4632]: I1201 07:20:14.167080 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ac0768b97567c3034fd8f93bc4cf3c0f65a3dbbef1f7e3e690a6acfad0b74fe"} err="failed to get container status \"1ac0768b97567c3034fd8f93bc4cf3c0f65a3dbbef1f7e3e690a6acfad0b74fe\": rpc error: code = NotFound desc = could not find container \"1ac0768b97567c3034fd8f93bc4cf3c0f65a3dbbef1f7e3e690a6acfad0b74fe\": container with ID starting with 1ac0768b97567c3034fd8f93bc4cf3c0f65a3dbbef1f7e3e690a6acfad0b74fe not found: ID does not exist" Dec 01 07:20:14 crc kubenswrapper[4632]: I1201 07:20:14.167115 4632 scope.go:117] "RemoveContainer" containerID="945b4b65dfe8e954447c4b2a904e1bc89e1f38afff3ba9a1b1350a91e6136a1e" Dec 01 07:20:14 crc kubenswrapper[4632]: E1201 07:20:14.167407 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"945b4b65dfe8e954447c4b2a904e1bc89e1f38afff3ba9a1b1350a91e6136a1e\": container with ID starting with 945b4b65dfe8e954447c4b2a904e1bc89e1f38afff3ba9a1b1350a91e6136a1e not found: ID does not exist" containerID="945b4b65dfe8e954447c4b2a904e1bc89e1f38afff3ba9a1b1350a91e6136a1e" Dec 01 07:20:14 crc kubenswrapper[4632]: I1201 07:20:14.167430 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"945b4b65dfe8e954447c4b2a904e1bc89e1f38afff3ba9a1b1350a91e6136a1e"} err="failed to get container status \"945b4b65dfe8e954447c4b2a904e1bc89e1f38afff3ba9a1b1350a91e6136a1e\": rpc error: code = NotFound desc = could not find container \"945b4b65dfe8e954447c4b2a904e1bc89e1f38afff3ba9a1b1350a91e6136a1e\": container with ID starting with 945b4b65dfe8e954447c4b2a904e1bc89e1f38afff3ba9a1b1350a91e6136a1e not found: ID does not exist" Dec 01 07:20:14 crc kubenswrapper[4632]: I1201 07:20:14.167445 4632 scope.go:117] "RemoveContainer" containerID="08a3ad8b5685e2377d5b3b0530c1ddf76860b43903f2c5ece194a4c8ad9efd90" Dec 01 07:20:14 crc kubenswrapper[4632]: E1201 07:20:14.167913 4632 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"08a3ad8b5685e2377d5b3b0530c1ddf76860b43903f2c5ece194a4c8ad9efd90\": container with ID starting with 08a3ad8b5685e2377d5b3b0530c1ddf76860b43903f2c5ece194a4c8ad9efd90 not found: ID does not exist" containerID="08a3ad8b5685e2377d5b3b0530c1ddf76860b43903f2c5ece194a4c8ad9efd90" Dec 01 07:20:14 crc kubenswrapper[4632]: I1201 07:20:14.167958 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08a3ad8b5685e2377d5b3b0530c1ddf76860b43903f2c5ece194a4c8ad9efd90"} err="failed to get container status \"08a3ad8b5685e2377d5b3b0530c1ddf76860b43903f2c5ece194a4c8ad9efd90\": rpc error: code = NotFound desc = could not find container \"08a3ad8b5685e2377d5b3b0530c1ddf76860b43903f2c5ece194a4c8ad9efd90\": container with ID starting with 08a3ad8b5685e2377d5b3b0530c1ddf76860b43903f2c5ece194a4c8ad9efd90 not found: ID does not exist" Dec 01 07:20:14 crc kubenswrapper[4632]: I1201 07:20:14.759417 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b80c025-b51c-4fb4-a768-a1731c31ce87" path="/var/lib/kubelet/pods/1b80c025-b51c-4fb4-a768-a1731c31ce87/volumes" Dec 01 07:20:22 crc kubenswrapper[4632]: I1201 07:20:22.750876 4632 scope.go:117] "RemoveContainer" containerID="d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b" Dec 01 07:20:22 crc kubenswrapper[4632]: E1201 07:20:22.751745 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:20:36 crc kubenswrapper[4632]: I1201 07:20:36.750883 4632 scope.go:117] "RemoveContainer" containerID="d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b" Dec 01 07:20:36 crc kubenswrapper[4632]: E1201 07:20:36.753257 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:20:39 crc kubenswrapper[4632]: I1201 07:20:39.649983 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-r2xhf"] Dec 01 07:20:39 crc kubenswrapper[4632]: E1201 07:20:39.651282 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b80c025-b51c-4fb4-a768-a1731c31ce87" containerName="extract-content" Dec 01 07:20:39 crc kubenswrapper[4632]: I1201 07:20:39.651297 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b80c025-b51c-4fb4-a768-a1731c31ce87" containerName="extract-content" Dec 01 07:20:39 crc kubenswrapper[4632]: E1201 07:20:39.651310 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b80c025-b51c-4fb4-a768-a1731c31ce87" containerName="registry-server" Dec 01 07:20:39 crc kubenswrapper[4632]: I1201 07:20:39.651317 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b80c025-b51c-4fb4-a768-a1731c31ce87" containerName="registry-server" Dec 01 07:20:39 crc kubenswrapper[4632]: E1201 07:20:39.651328 
4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b80c025-b51c-4fb4-a768-a1731c31ce87" containerName="extract-utilities" Dec 01 07:20:39 crc kubenswrapper[4632]: I1201 07:20:39.651333 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b80c025-b51c-4fb4-a768-a1731c31ce87" containerName="extract-utilities" Dec 01 07:20:39 crc kubenswrapper[4632]: I1201 07:20:39.651625 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b80c025-b51c-4fb4-a768-a1731c31ce87" containerName="registry-server" Dec 01 07:20:39 crc kubenswrapper[4632]: I1201 07:20:39.653068 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r2xhf" Dec 01 07:20:39 crc kubenswrapper[4632]: I1201 07:20:39.661343 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-r2xhf"] Dec 01 07:20:39 crc kubenswrapper[4632]: I1201 07:20:39.795600 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c746b5ce-886f-47b9-8efb-7222aeb9f04d-catalog-content\") pod \"certified-operators-r2xhf\" (UID: \"c746b5ce-886f-47b9-8efb-7222aeb9f04d\") " pod="openshift-marketplace/certified-operators-r2xhf" Dec 01 07:20:39 crc kubenswrapper[4632]: I1201 07:20:39.795665 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c746b5ce-886f-47b9-8efb-7222aeb9f04d-utilities\") pod \"certified-operators-r2xhf\" (UID: \"c746b5ce-886f-47b9-8efb-7222aeb9f04d\") " pod="openshift-marketplace/certified-operators-r2xhf" Dec 01 07:20:39 crc kubenswrapper[4632]: I1201 07:20:39.795949 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7vkj\" (UniqueName: \"kubernetes.io/projected/c746b5ce-886f-47b9-8efb-7222aeb9f04d-kube-api-access-s7vkj\") pod \"certified-operators-r2xhf\" (UID: \"c746b5ce-886f-47b9-8efb-7222aeb9f04d\") " pod="openshift-marketplace/certified-operators-r2xhf" Dec 01 07:20:39 crc kubenswrapper[4632]: I1201 07:20:39.897885 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7vkj\" (UniqueName: \"kubernetes.io/projected/c746b5ce-886f-47b9-8efb-7222aeb9f04d-kube-api-access-s7vkj\") pod \"certified-operators-r2xhf\" (UID: \"c746b5ce-886f-47b9-8efb-7222aeb9f04d\") " pod="openshift-marketplace/certified-operators-r2xhf" Dec 01 07:20:39 crc kubenswrapper[4632]: I1201 07:20:39.898089 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c746b5ce-886f-47b9-8efb-7222aeb9f04d-catalog-content\") pod \"certified-operators-r2xhf\" (UID: \"c746b5ce-886f-47b9-8efb-7222aeb9f04d\") " pod="openshift-marketplace/certified-operators-r2xhf" Dec 01 07:20:39 crc kubenswrapper[4632]: I1201 07:20:39.898117 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c746b5ce-886f-47b9-8efb-7222aeb9f04d-utilities\") pod \"certified-operators-r2xhf\" (UID: \"c746b5ce-886f-47b9-8efb-7222aeb9f04d\") " pod="openshift-marketplace/certified-operators-r2xhf" Dec 01 07:20:39 crc kubenswrapper[4632]: I1201 07:20:39.898702 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/c746b5ce-886f-47b9-8efb-7222aeb9f04d-catalog-content\") pod \"certified-operators-r2xhf\" (UID: \"c746b5ce-886f-47b9-8efb-7222aeb9f04d\") " pod="openshift-marketplace/certified-operators-r2xhf" Dec 01 07:20:39 crc kubenswrapper[4632]: I1201 07:20:39.898780 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c746b5ce-886f-47b9-8efb-7222aeb9f04d-utilities\") pod \"certified-operators-r2xhf\" (UID: \"c746b5ce-886f-47b9-8efb-7222aeb9f04d\") " pod="openshift-marketplace/certified-operators-r2xhf" Dec 01 07:20:39 crc kubenswrapper[4632]: I1201 07:20:39.917970 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7vkj\" (UniqueName: \"kubernetes.io/projected/c746b5ce-886f-47b9-8efb-7222aeb9f04d-kube-api-access-s7vkj\") pod \"certified-operators-r2xhf\" (UID: \"c746b5ce-886f-47b9-8efb-7222aeb9f04d\") " pod="openshift-marketplace/certified-operators-r2xhf" Dec 01 07:20:39 crc kubenswrapper[4632]: I1201 07:20:39.970656 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-r2xhf" Dec 01 07:20:40 crc kubenswrapper[4632]: I1201 07:20:40.045618 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ccrtt"] Dec 01 07:20:40 crc kubenswrapper[4632]: I1201 07:20:40.047374 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ccrtt" Dec 01 07:20:40 crc kubenswrapper[4632]: I1201 07:20:40.053537 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ccrtt"] Dec 01 07:20:40 crc kubenswrapper[4632]: I1201 07:20:40.203566 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad541747-dff5-46e8-afbb-de073004ab7d-utilities\") pod \"community-operators-ccrtt\" (UID: \"ad541747-dff5-46e8-afbb-de073004ab7d\") " pod="openshift-marketplace/community-operators-ccrtt" Dec 01 07:20:40 crc kubenswrapper[4632]: I1201 07:20:40.203954 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad541747-dff5-46e8-afbb-de073004ab7d-catalog-content\") pod \"community-operators-ccrtt\" (UID: \"ad541747-dff5-46e8-afbb-de073004ab7d\") " pod="openshift-marketplace/community-operators-ccrtt" Dec 01 07:20:40 crc kubenswrapper[4632]: I1201 07:20:40.204262 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfdlp\" (UniqueName: \"kubernetes.io/projected/ad541747-dff5-46e8-afbb-de073004ab7d-kube-api-access-lfdlp\") pod \"community-operators-ccrtt\" (UID: \"ad541747-dff5-46e8-afbb-de073004ab7d\") " pod="openshift-marketplace/community-operators-ccrtt" Dec 01 07:20:40 crc kubenswrapper[4632]: I1201 07:20:40.305954 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad541747-dff5-46e8-afbb-de073004ab7d-utilities\") pod \"community-operators-ccrtt\" (UID: \"ad541747-dff5-46e8-afbb-de073004ab7d\") " pod="openshift-marketplace/community-operators-ccrtt" Dec 01 07:20:40 crc kubenswrapper[4632]: I1201 07:20:40.306038 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/ad541747-dff5-46e8-afbb-de073004ab7d-catalog-content\") pod \"community-operators-ccrtt\" (UID: \"ad541747-dff5-46e8-afbb-de073004ab7d\") " pod="openshift-marketplace/community-operators-ccrtt" Dec 01 07:20:40 crc kubenswrapper[4632]: I1201 07:20:40.306249 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfdlp\" (UniqueName: \"kubernetes.io/projected/ad541747-dff5-46e8-afbb-de073004ab7d-kube-api-access-lfdlp\") pod \"community-operators-ccrtt\" (UID: \"ad541747-dff5-46e8-afbb-de073004ab7d\") " pod="openshift-marketplace/community-operators-ccrtt" Dec 01 07:20:40 crc kubenswrapper[4632]: I1201 07:20:40.306725 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad541747-dff5-46e8-afbb-de073004ab7d-catalog-content\") pod \"community-operators-ccrtt\" (UID: \"ad541747-dff5-46e8-afbb-de073004ab7d\") " pod="openshift-marketplace/community-operators-ccrtt" Dec 01 07:20:40 crc kubenswrapper[4632]: I1201 07:20:40.306750 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad541747-dff5-46e8-afbb-de073004ab7d-utilities\") pod \"community-operators-ccrtt\" (UID: \"ad541747-dff5-46e8-afbb-de073004ab7d\") " pod="openshift-marketplace/community-operators-ccrtt" Dec 01 07:20:40 crc kubenswrapper[4632]: I1201 07:20:40.328066 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfdlp\" (UniqueName: \"kubernetes.io/projected/ad541747-dff5-46e8-afbb-de073004ab7d-kube-api-access-lfdlp\") pod \"community-operators-ccrtt\" (UID: \"ad541747-dff5-46e8-afbb-de073004ab7d\") " pod="openshift-marketplace/community-operators-ccrtt" Dec 01 07:20:40 crc kubenswrapper[4632]: I1201 07:20:40.369153 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ccrtt" Dec 01 07:20:40 crc kubenswrapper[4632]: I1201 07:20:40.499392 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-r2xhf"] Dec 01 07:20:40 crc kubenswrapper[4632]: W1201 07:20:40.793963 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podad541747_dff5_46e8_afbb_de073004ab7d.slice/crio-42c8f62504375c57f799d4fc6bc2cc689cff15e6ce79a78b0da829d663b449f9 WatchSource:0}: Error finding container 42c8f62504375c57f799d4fc6bc2cc689cff15e6ce79a78b0da829d663b449f9: Status 404 returned error can't find the container with id 42c8f62504375c57f799d4fc6bc2cc689cff15e6ce79a78b0da829d663b449f9 Dec 01 07:20:40 crc kubenswrapper[4632]: I1201 07:20:40.801929 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ccrtt"] Dec 01 07:20:41 crc kubenswrapper[4632]: I1201 07:20:41.321862 4632 generic.go:334] "Generic (PLEG): container finished" podID="c746b5ce-886f-47b9-8efb-7222aeb9f04d" containerID="15c4c463966aad54c2a77002aee0391094d856c53e23cd953979f3dc11c19f11" exitCode=0 Dec 01 07:20:41 crc kubenswrapper[4632]: I1201 07:20:41.321913 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r2xhf" event={"ID":"c746b5ce-886f-47b9-8efb-7222aeb9f04d","Type":"ContainerDied","Data":"15c4c463966aad54c2a77002aee0391094d856c53e23cd953979f3dc11c19f11"} Dec 01 07:20:41 crc kubenswrapper[4632]: I1201 07:20:41.322230 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r2xhf" event={"ID":"c746b5ce-886f-47b9-8efb-7222aeb9f04d","Type":"ContainerStarted","Data":"0c47290e701a1920eff2c6760717303bb7315e04b9fdea0adc37fc72fb7184f3"} Dec 01 07:20:41 crc kubenswrapper[4632]: I1201 07:20:41.323791 4632 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 07:20:41 crc kubenswrapper[4632]: I1201 07:20:41.324161 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ccrtt" event={"ID":"ad541747-dff5-46e8-afbb-de073004ab7d","Type":"ContainerDied","Data":"88a439eb142a8e345d88f4426994bbb2516945baa56f6589e40f4e8e62ad700a"} Dec 01 07:20:41 crc kubenswrapper[4632]: I1201 07:20:41.324548 4632 generic.go:334] "Generic (PLEG): container finished" podID="ad541747-dff5-46e8-afbb-de073004ab7d" containerID="88a439eb142a8e345d88f4426994bbb2516945baa56f6589e40f4e8e62ad700a" exitCode=0 Dec 01 07:20:41 crc kubenswrapper[4632]: I1201 07:20:41.324596 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ccrtt" event={"ID":"ad541747-dff5-46e8-afbb-de073004ab7d","Type":"ContainerStarted","Data":"42c8f62504375c57f799d4fc6bc2cc689cff15e6ce79a78b0da829d663b449f9"} Dec 01 07:20:42 crc kubenswrapper[4632]: I1201 07:20:42.337536 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r2xhf" event={"ID":"c746b5ce-886f-47b9-8efb-7222aeb9f04d","Type":"ContainerStarted","Data":"3b48c0f9c612e9ac39378bbc1122c3b144b8edc25c5b7efc05e607ccffac665c"} Dec 01 07:20:43 crc kubenswrapper[4632]: I1201 07:20:43.346220 4632 generic.go:334] "Generic (PLEG): container finished" podID="ad541747-dff5-46e8-afbb-de073004ab7d" containerID="e121e761b44ded58d8913289fc24c72896dbdbb9818b0929f19f68ba2249e5af" exitCode=0 Dec 01 07:20:43 crc kubenswrapper[4632]: 
I1201 07:20:43.346322 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ccrtt" event={"ID":"ad541747-dff5-46e8-afbb-de073004ab7d","Type":"ContainerDied","Data":"e121e761b44ded58d8913289fc24c72896dbdbb9818b0929f19f68ba2249e5af"} Dec 01 07:20:43 crc kubenswrapper[4632]: I1201 07:20:43.350347 4632 generic.go:334] "Generic (PLEG): container finished" podID="c746b5ce-886f-47b9-8efb-7222aeb9f04d" containerID="3b48c0f9c612e9ac39378bbc1122c3b144b8edc25c5b7efc05e607ccffac665c" exitCode=0 Dec 01 07:20:43 crc kubenswrapper[4632]: I1201 07:20:43.350399 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r2xhf" event={"ID":"c746b5ce-886f-47b9-8efb-7222aeb9f04d","Type":"ContainerDied","Data":"3b48c0f9c612e9ac39378bbc1122c3b144b8edc25c5b7efc05e607ccffac665c"} Dec 01 07:20:44 crc kubenswrapper[4632]: I1201 07:20:44.366000 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ccrtt" event={"ID":"ad541747-dff5-46e8-afbb-de073004ab7d","Type":"ContainerStarted","Data":"7d48b0e200079c25497d68b2dacf28af90a7652eb0a8821be44f5f7e5c76945f"} Dec 01 07:20:44 crc kubenswrapper[4632]: I1201 07:20:44.369688 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r2xhf" event={"ID":"c746b5ce-886f-47b9-8efb-7222aeb9f04d","Type":"ContainerStarted","Data":"8ad2c0b96ccbd5f926b4c5df85214e08541b6d73a41640c1e6308ffe783c5364"} Dec 01 07:20:44 crc kubenswrapper[4632]: I1201 07:20:44.394080 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ccrtt" podStartSLOduration=1.8831849410000001 podStartE2EDuration="4.394065509s" podCreationTimestamp="2025-12-01 07:20:40 +0000 UTC" firstStartedPulling="2025-12-01 07:20:41.325007257 +0000 UTC m=+2250.890020230" lastFinishedPulling="2025-12-01 07:20:43.835887826 +0000 UTC m=+2253.400900798" observedRunningTime="2025-12-01 07:20:44.394030773 +0000 UTC m=+2253.959043746" watchObservedRunningTime="2025-12-01 07:20:44.394065509 +0000 UTC m=+2253.959078482" Dec 01 07:20:44 crc kubenswrapper[4632]: I1201 07:20:44.427775 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-r2xhf" podStartSLOduration=2.859231799 podStartE2EDuration="5.42776137s" podCreationTimestamp="2025-12-01 07:20:39 +0000 UTC" firstStartedPulling="2025-12-01 07:20:41.32356156 +0000 UTC m=+2250.888574533" lastFinishedPulling="2025-12-01 07:20:43.89209113 +0000 UTC m=+2253.457104104" observedRunningTime="2025-12-01 07:20:44.425781791 +0000 UTC m=+2253.990794764" watchObservedRunningTime="2025-12-01 07:20:44.42776137 +0000 UTC m=+2253.992774344" Dec 01 07:20:48 crc kubenswrapper[4632]: I1201 07:20:48.750569 4632 scope.go:117] "RemoveContainer" containerID="d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b" Dec 01 07:20:48 crc kubenswrapper[4632]: E1201 07:20:48.751512 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:20:49 crc kubenswrapper[4632]: I1201 07:20:49.971577 4632 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/certified-operators-r2xhf" Dec 01 07:20:49 crc kubenswrapper[4632]: I1201 07:20:49.971843 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-r2xhf" Dec 01 07:20:50 crc kubenswrapper[4632]: I1201 07:20:50.015721 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-r2xhf" Dec 01 07:20:50 crc kubenswrapper[4632]: I1201 07:20:50.370763 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ccrtt" Dec 01 07:20:50 crc kubenswrapper[4632]: I1201 07:20:50.370806 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ccrtt" Dec 01 07:20:50 crc kubenswrapper[4632]: I1201 07:20:50.406144 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ccrtt" Dec 01 07:20:50 crc kubenswrapper[4632]: I1201 07:20:50.462598 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ccrtt" Dec 01 07:20:50 crc kubenswrapper[4632]: I1201 07:20:50.463143 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-r2xhf" Dec 01 07:20:51 crc kubenswrapper[4632]: I1201 07:20:51.439302 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-r2xhf"] Dec 01 07:20:52 crc kubenswrapper[4632]: I1201 07:20:52.433004 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-r2xhf" podUID="c746b5ce-886f-47b9-8efb-7222aeb9f04d" containerName="registry-server" containerID="cri-o://8ad2c0b96ccbd5f926b4c5df85214e08541b6d73a41640c1e6308ffe783c5364" gracePeriod=2 Dec 01 07:20:52 crc kubenswrapper[4632]: I1201 07:20:52.856932 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-r2xhf" Dec 01 07:20:52 crc kubenswrapper[4632]: I1201 07:20:52.874270 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c746b5ce-886f-47b9-8efb-7222aeb9f04d-utilities\") pod \"c746b5ce-886f-47b9-8efb-7222aeb9f04d\" (UID: \"c746b5ce-886f-47b9-8efb-7222aeb9f04d\") " Dec 01 07:20:52 crc kubenswrapper[4632]: I1201 07:20:52.874438 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c746b5ce-886f-47b9-8efb-7222aeb9f04d-catalog-content\") pod \"c746b5ce-886f-47b9-8efb-7222aeb9f04d\" (UID: \"c746b5ce-886f-47b9-8efb-7222aeb9f04d\") " Dec 01 07:20:52 crc kubenswrapper[4632]: I1201 07:20:52.874480 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s7vkj\" (UniqueName: \"kubernetes.io/projected/c746b5ce-886f-47b9-8efb-7222aeb9f04d-kube-api-access-s7vkj\") pod \"c746b5ce-886f-47b9-8efb-7222aeb9f04d\" (UID: \"c746b5ce-886f-47b9-8efb-7222aeb9f04d\") " Dec 01 07:20:52 crc kubenswrapper[4632]: I1201 07:20:52.875868 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c746b5ce-886f-47b9-8efb-7222aeb9f04d-utilities" (OuterVolumeSpecName: "utilities") pod "c746b5ce-886f-47b9-8efb-7222aeb9f04d" (UID: "c746b5ce-886f-47b9-8efb-7222aeb9f04d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:20:52 crc kubenswrapper[4632]: I1201 07:20:52.880721 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c746b5ce-886f-47b9-8efb-7222aeb9f04d-kube-api-access-s7vkj" (OuterVolumeSpecName: "kube-api-access-s7vkj") pod "c746b5ce-886f-47b9-8efb-7222aeb9f04d" (UID: "c746b5ce-886f-47b9-8efb-7222aeb9f04d"). InnerVolumeSpecName "kube-api-access-s7vkj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:20:52 crc kubenswrapper[4632]: I1201 07:20:52.917775 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c746b5ce-886f-47b9-8efb-7222aeb9f04d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c746b5ce-886f-47b9-8efb-7222aeb9f04d" (UID: "c746b5ce-886f-47b9-8efb-7222aeb9f04d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:20:52 crc kubenswrapper[4632]: I1201 07:20:52.976936 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c746b5ce-886f-47b9-8efb-7222aeb9f04d-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:20:52 crc kubenswrapper[4632]: I1201 07:20:52.976964 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c746b5ce-886f-47b9-8efb-7222aeb9f04d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:20:52 crc kubenswrapper[4632]: I1201 07:20:52.976979 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s7vkj\" (UniqueName: \"kubernetes.io/projected/c746b5ce-886f-47b9-8efb-7222aeb9f04d-kube-api-access-s7vkj\") on node \"crc\" DevicePath \"\"" Dec 01 07:20:53 crc kubenswrapper[4632]: I1201 07:20:53.439107 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ccrtt"] Dec 01 07:20:53 crc kubenswrapper[4632]: I1201 07:20:53.442817 4632 generic.go:334] "Generic (PLEG): container finished" podID="c746b5ce-886f-47b9-8efb-7222aeb9f04d" containerID="8ad2c0b96ccbd5f926b4c5df85214e08541b6d73a41640c1e6308ffe783c5364" exitCode=0 Dec 01 07:20:53 crc kubenswrapper[4632]: I1201 07:20:53.442852 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r2xhf" event={"ID":"c746b5ce-886f-47b9-8efb-7222aeb9f04d","Type":"ContainerDied","Data":"8ad2c0b96ccbd5f926b4c5df85214e08541b6d73a41640c1e6308ffe783c5364"} Dec 01 07:20:53 crc kubenswrapper[4632]: I1201 07:20:53.442913 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-r2xhf" event={"ID":"c746b5ce-886f-47b9-8efb-7222aeb9f04d","Type":"ContainerDied","Data":"0c47290e701a1920eff2c6760717303bb7315e04b9fdea0adc37fc72fb7184f3"} Dec 01 07:20:53 crc kubenswrapper[4632]: I1201 07:20:53.442950 4632 scope.go:117] "RemoveContainer" containerID="8ad2c0b96ccbd5f926b4c5df85214e08541b6d73a41640c1e6308ffe783c5364" Dec 01 07:20:53 crc kubenswrapper[4632]: I1201 07:20:53.443007 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ccrtt" podUID="ad541747-dff5-46e8-afbb-de073004ab7d" containerName="registry-server" containerID="cri-o://7d48b0e200079c25497d68b2dacf28af90a7652eb0a8821be44f5f7e5c76945f" gracePeriod=2 Dec 01 07:20:53 crc kubenswrapper[4632]: I1201 07:20:53.443131 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-r2xhf" Dec 01 07:20:53 crc kubenswrapper[4632]: I1201 07:20:53.463757 4632 scope.go:117] "RemoveContainer" containerID="3b48c0f9c612e9ac39378bbc1122c3b144b8edc25c5b7efc05e607ccffac665c" Dec 01 07:20:53 crc kubenswrapper[4632]: I1201 07:20:53.482178 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-r2xhf"] Dec 01 07:20:53 crc kubenswrapper[4632]: I1201 07:20:53.489142 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-r2xhf"] Dec 01 07:20:53 crc kubenswrapper[4632]: I1201 07:20:53.509545 4632 scope.go:117] "RemoveContainer" containerID="15c4c463966aad54c2a77002aee0391094d856c53e23cd953979f3dc11c19f11" Dec 01 07:20:53 crc kubenswrapper[4632]: I1201 07:20:53.666430 4632 scope.go:117] "RemoveContainer" containerID="8ad2c0b96ccbd5f926b4c5df85214e08541b6d73a41640c1e6308ffe783c5364" Dec 01 07:20:53 crc kubenswrapper[4632]: E1201 07:20:53.666991 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ad2c0b96ccbd5f926b4c5df85214e08541b6d73a41640c1e6308ffe783c5364\": container with ID starting with 8ad2c0b96ccbd5f926b4c5df85214e08541b6d73a41640c1e6308ffe783c5364 not found: ID does not exist" containerID="8ad2c0b96ccbd5f926b4c5df85214e08541b6d73a41640c1e6308ffe783c5364" Dec 01 07:20:53 crc kubenswrapper[4632]: I1201 07:20:53.667044 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ad2c0b96ccbd5f926b4c5df85214e08541b6d73a41640c1e6308ffe783c5364"} err="failed to get container status \"8ad2c0b96ccbd5f926b4c5df85214e08541b6d73a41640c1e6308ffe783c5364\": rpc error: code = NotFound desc = could not find container \"8ad2c0b96ccbd5f926b4c5df85214e08541b6d73a41640c1e6308ffe783c5364\": container with ID starting with 8ad2c0b96ccbd5f926b4c5df85214e08541b6d73a41640c1e6308ffe783c5364 not found: ID does not exist" Dec 01 07:20:53 crc kubenswrapper[4632]: I1201 07:20:53.667069 4632 scope.go:117] "RemoveContainer" containerID="3b48c0f9c612e9ac39378bbc1122c3b144b8edc25c5b7efc05e607ccffac665c" Dec 01 07:20:53 crc kubenswrapper[4632]: E1201 07:20:53.667806 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b48c0f9c612e9ac39378bbc1122c3b144b8edc25c5b7efc05e607ccffac665c\": container with ID starting with 3b48c0f9c612e9ac39378bbc1122c3b144b8edc25c5b7efc05e607ccffac665c not found: ID does not exist" containerID="3b48c0f9c612e9ac39378bbc1122c3b144b8edc25c5b7efc05e607ccffac665c" Dec 01 07:20:53 crc kubenswrapper[4632]: I1201 07:20:53.667838 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b48c0f9c612e9ac39378bbc1122c3b144b8edc25c5b7efc05e607ccffac665c"} err="failed to get container status \"3b48c0f9c612e9ac39378bbc1122c3b144b8edc25c5b7efc05e607ccffac665c\": rpc error: code = NotFound desc = could not find container \"3b48c0f9c612e9ac39378bbc1122c3b144b8edc25c5b7efc05e607ccffac665c\": container with ID starting with 3b48c0f9c612e9ac39378bbc1122c3b144b8edc25c5b7efc05e607ccffac665c not found: ID does not exist" Dec 01 07:20:53 crc kubenswrapper[4632]: I1201 07:20:53.667855 4632 scope.go:117] "RemoveContainer" containerID="15c4c463966aad54c2a77002aee0391094d856c53e23cd953979f3dc11c19f11" Dec 01 07:20:53 crc kubenswrapper[4632]: E1201 07:20:53.668192 4632 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"15c4c463966aad54c2a77002aee0391094d856c53e23cd953979f3dc11c19f11\": container with ID starting with 15c4c463966aad54c2a77002aee0391094d856c53e23cd953979f3dc11c19f11 not found: ID does not exist" containerID="15c4c463966aad54c2a77002aee0391094d856c53e23cd953979f3dc11c19f11" Dec 01 07:20:53 crc kubenswrapper[4632]: I1201 07:20:53.668236 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15c4c463966aad54c2a77002aee0391094d856c53e23cd953979f3dc11c19f11"} err="failed to get container status \"15c4c463966aad54c2a77002aee0391094d856c53e23cd953979f3dc11c19f11\": rpc error: code = NotFound desc = could not find container \"15c4c463966aad54c2a77002aee0391094d856c53e23cd953979f3dc11c19f11\": container with ID starting with 15c4c463966aad54c2a77002aee0391094d856c53e23cd953979f3dc11c19f11 not found: ID does not exist" Dec 01 07:20:53 crc kubenswrapper[4632]: I1201 07:20:53.840028 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ccrtt" Dec 01 07:20:53 crc kubenswrapper[4632]: I1201 07:20:53.997316 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lfdlp\" (UniqueName: \"kubernetes.io/projected/ad541747-dff5-46e8-afbb-de073004ab7d-kube-api-access-lfdlp\") pod \"ad541747-dff5-46e8-afbb-de073004ab7d\" (UID: \"ad541747-dff5-46e8-afbb-de073004ab7d\") " Dec 01 07:20:53 crc kubenswrapper[4632]: I1201 07:20:53.997403 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad541747-dff5-46e8-afbb-de073004ab7d-catalog-content\") pod \"ad541747-dff5-46e8-afbb-de073004ab7d\" (UID: \"ad541747-dff5-46e8-afbb-de073004ab7d\") " Dec 01 07:20:53 crc kubenswrapper[4632]: I1201 07:20:53.997460 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad541747-dff5-46e8-afbb-de073004ab7d-utilities\") pod \"ad541747-dff5-46e8-afbb-de073004ab7d\" (UID: \"ad541747-dff5-46e8-afbb-de073004ab7d\") " Dec 01 07:20:53 crc kubenswrapper[4632]: I1201 07:20:53.997872 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad541747-dff5-46e8-afbb-de073004ab7d-utilities" (OuterVolumeSpecName: "utilities") pod "ad541747-dff5-46e8-afbb-de073004ab7d" (UID: "ad541747-dff5-46e8-afbb-de073004ab7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:20:53 crc kubenswrapper[4632]: I1201 07:20:53.998187 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad541747-dff5-46e8-afbb-de073004ab7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:20:54 crc kubenswrapper[4632]: I1201 07:20:54.003381 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad541747-dff5-46e8-afbb-de073004ab7d-kube-api-access-lfdlp" (OuterVolumeSpecName: "kube-api-access-lfdlp") pod "ad541747-dff5-46e8-afbb-de073004ab7d" (UID: "ad541747-dff5-46e8-afbb-de073004ab7d"). InnerVolumeSpecName "kube-api-access-lfdlp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:20:54 crc kubenswrapper[4632]: I1201 07:20:54.033557 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad541747-dff5-46e8-afbb-de073004ab7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ad541747-dff5-46e8-afbb-de073004ab7d" (UID: "ad541747-dff5-46e8-afbb-de073004ab7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:20:54 crc kubenswrapper[4632]: I1201 07:20:54.101069 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lfdlp\" (UniqueName: \"kubernetes.io/projected/ad541747-dff5-46e8-afbb-de073004ab7d-kube-api-access-lfdlp\") on node \"crc\" DevicePath \"\"" Dec 01 07:20:54 crc kubenswrapper[4632]: I1201 07:20:54.101119 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad541747-dff5-46e8-afbb-de073004ab7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:20:54 crc kubenswrapper[4632]: I1201 07:20:54.454020 4632 generic.go:334] "Generic (PLEG): container finished" podID="ad541747-dff5-46e8-afbb-de073004ab7d" containerID="7d48b0e200079c25497d68b2dacf28af90a7652eb0a8821be44f5f7e5c76945f" exitCode=0 Dec 01 07:20:54 crc kubenswrapper[4632]: I1201 07:20:54.454094 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ccrtt" event={"ID":"ad541747-dff5-46e8-afbb-de073004ab7d","Type":"ContainerDied","Data":"7d48b0e200079c25497d68b2dacf28af90a7652eb0a8821be44f5f7e5c76945f"} Dec 01 07:20:54 crc kubenswrapper[4632]: I1201 07:20:54.454136 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ccrtt" Dec 01 07:20:54 crc kubenswrapper[4632]: I1201 07:20:54.454204 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ccrtt" event={"ID":"ad541747-dff5-46e8-afbb-de073004ab7d","Type":"ContainerDied","Data":"42c8f62504375c57f799d4fc6bc2cc689cff15e6ce79a78b0da829d663b449f9"} Dec 01 07:20:54 crc kubenswrapper[4632]: I1201 07:20:54.454239 4632 scope.go:117] "RemoveContainer" containerID="7d48b0e200079c25497d68b2dacf28af90a7652eb0a8821be44f5f7e5c76945f" Dec 01 07:20:54 crc kubenswrapper[4632]: I1201 07:20:54.473522 4632 scope.go:117] "RemoveContainer" containerID="e121e761b44ded58d8913289fc24c72896dbdbb9818b0929f19f68ba2249e5af" Dec 01 07:20:54 crc kubenswrapper[4632]: I1201 07:20:54.491660 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ccrtt"] Dec 01 07:20:54 crc kubenswrapper[4632]: I1201 07:20:54.500060 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ccrtt"] Dec 01 07:20:54 crc kubenswrapper[4632]: I1201 07:20:54.504924 4632 scope.go:117] "RemoveContainer" containerID="88a439eb142a8e345d88f4426994bbb2516945baa56f6589e40f4e8e62ad700a" Dec 01 07:20:54 crc kubenswrapper[4632]: I1201 07:20:54.520490 4632 scope.go:117] "RemoveContainer" containerID="7d48b0e200079c25497d68b2dacf28af90a7652eb0a8821be44f5f7e5c76945f" Dec 01 07:20:54 crc kubenswrapper[4632]: E1201 07:20:54.520971 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d48b0e200079c25497d68b2dacf28af90a7652eb0a8821be44f5f7e5c76945f\": container with ID starting with 
7d48b0e200079c25497d68b2dacf28af90a7652eb0a8821be44f5f7e5c76945f not found: ID does not exist" containerID="7d48b0e200079c25497d68b2dacf28af90a7652eb0a8821be44f5f7e5c76945f" Dec 01 07:20:54 crc kubenswrapper[4632]: I1201 07:20:54.521018 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d48b0e200079c25497d68b2dacf28af90a7652eb0a8821be44f5f7e5c76945f"} err="failed to get container status \"7d48b0e200079c25497d68b2dacf28af90a7652eb0a8821be44f5f7e5c76945f\": rpc error: code = NotFound desc = could not find container \"7d48b0e200079c25497d68b2dacf28af90a7652eb0a8821be44f5f7e5c76945f\": container with ID starting with 7d48b0e200079c25497d68b2dacf28af90a7652eb0a8821be44f5f7e5c76945f not found: ID does not exist" Dec 01 07:20:54 crc kubenswrapper[4632]: I1201 07:20:54.521050 4632 scope.go:117] "RemoveContainer" containerID="e121e761b44ded58d8913289fc24c72896dbdbb9818b0929f19f68ba2249e5af" Dec 01 07:20:54 crc kubenswrapper[4632]: E1201 07:20:54.521440 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e121e761b44ded58d8913289fc24c72896dbdbb9818b0929f19f68ba2249e5af\": container with ID starting with e121e761b44ded58d8913289fc24c72896dbdbb9818b0929f19f68ba2249e5af not found: ID does not exist" containerID="e121e761b44ded58d8913289fc24c72896dbdbb9818b0929f19f68ba2249e5af" Dec 01 07:20:54 crc kubenswrapper[4632]: I1201 07:20:54.521493 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e121e761b44ded58d8913289fc24c72896dbdbb9818b0929f19f68ba2249e5af"} err="failed to get container status \"e121e761b44ded58d8913289fc24c72896dbdbb9818b0929f19f68ba2249e5af\": rpc error: code = NotFound desc = could not find container \"e121e761b44ded58d8913289fc24c72896dbdbb9818b0929f19f68ba2249e5af\": container with ID starting with e121e761b44ded58d8913289fc24c72896dbdbb9818b0929f19f68ba2249e5af not found: ID does not exist" Dec 01 07:20:54 crc kubenswrapper[4632]: I1201 07:20:54.521534 4632 scope.go:117] "RemoveContainer" containerID="88a439eb142a8e345d88f4426994bbb2516945baa56f6589e40f4e8e62ad700a" Dec 01 07:20:54 crc kubenswrapper[4632]: E1201 07:20:54.521888 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88a439eb142a8e345d88f4426994bbb2516945baa56f6589e40f4e8e62ad700a\": container with ID starting with 88a439eb142a8e345d88f4426994bbb2516945baa56f6589e40f4e8e62ad700a not found: ID does not exist" containerID="88a439eb142a8e345d88f4426994bbb2516945baa56f6589e40f4e8e62ad700a" Dec 01 07:20:54 crc kubenswrapper[4632]: I1201 07:20:54.521929 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88a439eb142a8e345d88f4426994bbb2516945baa56f6589e40f4e8e62ad700a"} err="failed to get container status \"88a439eb142a8e345d88f4426994bbb2516945baa56f6589e40f4e8e62ad700a\": rpc error: code = NotFound desc = could not find container \"88a439eb142a8e345d88f4426994bbb2516945baa56f6589e40f4e8e62ad700a\": container with ID starting with 88a439eb142a8e345d88f4426994bbb2516945baa56f6589e40f4e8e62ad700a not found: ID does not exist" Dec 01 07:20:54 crc kubenswrapper[4632]: I1201 07:20:54.759748 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad541747-dff5-46e8-afbb-de073004ab7d" path="/var/lib/kubelet/pods/ad541747-dff5-46e8-afbb-de073004ab7d/volumes" Dec 01 07:20:54 crc kubenswrapper[4632]: I1201 07:20:54.760450 
4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c746b5ce-886f-47b9-8efb-7222aeb9f04d" path="/var/lib/kubelet/pods/c746b5ce-886f-47b9-8efb-7222aeb9f04d/volumes" Dec 01 07:21:03 crc kubenswrapper[4632]: I1201 07:21:03.750463 4632 scope.go:117] "RemoveContainer" containerID="d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b" Dec 01 07:21:03 crc kubenswrapper[4632]: E1201 07:21:03.751100 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:21:18 crc kubenswrapper[4632]: I1201 07:21:18.750732 4632 scope.go:117] "RemoveContainer" containerID="d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b" Dec 01 07:21:18 crc kubenswrapper[4632]: E1201 07:21:18.751243 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:21:33 crc kubenswrapper[4632]: I1201 07:21:33.750203 4632 scope.go:117] "RemoveContainer" containerID="d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b" Dec 01 07:21:33 crc kubenswrapper[4632]: E1201 07:21:33.750761 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:21:46 crc kubenswrapper[4632]: I1201 07:21:46.750458 4632 scope.go:117] "RemoveContainer" containerID="d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b" Dec 01 07:21:46 crc kubenswrapper[4632]: E1201 07:21:46.750996 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:21:57 crc kubenswrapper[4632]: I1201 07:21:57.750864 4632 scope.go:117] "RemoveContainer" containerID="d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b" Dec 01 07:21:57 crc kubenswrapper[4632]: E1201 07:21:57.751343 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 
07:22:12 crc kubenswrapper[4632]: I1201 07:22:12.750374 4632 scope.go:117] "RemoveContainer" containerID="d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b" Dec 01 07:22:12 crc kubenswrapper[4632]: E1201 07:22:12.751184 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:22:26 crc kubenswrapper[4632]: I1201 07:22:26.750493 4632 scope.go:117] "RemoveContainer" containerID="d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b" Dec 01 07:22:26 crc kubenswrapper[4632]: E1201 07:22:26.751406 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:22:37 crc kubenswrapper[4632]: I1201 07:22:37.750297 4632 scope.go:117] "RemoveContainer" containerID="d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b" Dec 01 07:22:37 crc kubenswrapper[4632]: E1201 07:22:37.750852 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:22:48 crc kubenswrapper[4632]: I1201 07:22:48.750532 4632 scope.go:117] "RemoveContainer" containerID="d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b" Dec 01 07:22:48 crc kubenswrapper[4632]: E1201 07:22:48.751156 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:23:03 crc kubenswrapper[4632]: I1201 07:23:03.750177 4632 scope.go:117] "RemoveContainer" containerID="d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b" Dec 01 07:23:03 crc kubenswrapper[4632]: E1201 07:23:03.750876 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:23:14 crc kubenswrapper[4632]: I1201 07:23:14.749933 4632 scope.go:117] "RemoveContainer" containerID="d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b" Dec 01 07:23:14 crc 
kubenswrapper[4632]: E1201 07:23:14.750640 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:23:27 crc kubenswrapper[4632]: I1201 07:23:27.750662 4632 scope.go:117] "RemoveContainer" containerID="d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b" Dec 01 07:23:27 crc kubenswrapper[4632]: E1201 07:23:27.751420 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:23:39 crc kubenswrapper[4632]: I1201 07:23:39.750586 4632 scope.go:117] "RemoveContainer" containerID="d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b" Dec 01 07:23:39 crc kubenswrapper[4632]: E1201 07:23:39.751483 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:23:50 crc kubenswrapper[4632]: I1201 07:23:50.777000 4632 scope.go:117] "RemoveContainer" containerID="d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b" Dec 01 07:23:50 crc kubenswrapper[4632]: E1201 07:23:50.778530 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:24:02 crc kubenswrapper[4632]: I1201 07:24:02.750292 4632 scope.go:117] "RemoveContainer" containerID="d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b" Dec 01 07:24:02 crc kubenswrapper[4632]: E1201 07:24:02.751181 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:24:14 crc kubenswrapper[4632]: I1201 07:24:14.750327 4632 scope.go:117] "RemoveContainer" containerID="d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b" Dec 01 07:24:14 crc kubenswrapper[4632]: E1201 07:24:14.751249 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:24:26 crc kubenswrapper[4632]: I1201 07:24:26.750776 4632 scope.go:117] "RemoveContainer" containerID="d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b" Dec 01 07:24:26 crc kubenswrapper[4632]: E1201 07:24:26.751608 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:24:37 crc kubenswrapper[4632]: I1201 07:24:37.750703 4632 scope.go:117] "RemoveContainer" containerID="d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b" Dec 01 07:24:37 crc kubenswrapper[4632]: E1201 07:24:37.752794 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:24:52 crc kubenswrapper[4632]: I1201 07:24:52.750745 4632 scope.go:117] "RemoveContainer" containerID="d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b" Dec 01 07:24:53 crc kubenswrapper[4632]: I1201 07:24:53.137171 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerStarted","Data":"9f6af426575a83107158be6c5915461d388f641893056af8705d97507d209c8b"} Dec 01 07:27:19 crc kubenswrapper[4632]: I1201 07:27:19.498416 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:27:19 crc kubenswrapper[4632]: I1201 07:27:19.498979 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:27:49 crc kubenswrapper[4632]: I1201 07:27:49.498502 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:27:49 crc kubenswrapper[4632]: I1201 07:27:49.499113 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:28:19 crc kubenswrapper[4632]: I1201 07:28:19.498409 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:28:19 crc kubenswrapper[4632]: I1201 07:28:19.498983 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:28:19 crc kubenswrapper[4632]: I1201 07:28:19.499029 4632 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" Dec 01 07:28:19 crc kubenswrapper[4632]: I1201 07:28:19.499555 4632 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9f6af426575a83107158be6c5915461d388f641893056af8705d97507d209c8b"} pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 07:28:19 crc kubenswrapper[4632]: I1201 07:28:19.499611 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" containerID="cri-o://9f6af426575a83107158be6c5915461d388f641893056af8705d97507d209c8b" gracePeriod=600 Dec 01 07:28:19 crc kubenswrapper[4632]: I1201 07:28:19.684163 4632 generic.go:334] "Generic (PLEG): container finished" podID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerID="9f6af426575a83107158be6c5915461d388f641893056af8705d97507d209c8b" exitCode=0 Dec 01 07:28:19 crc kubenswrapper[4632]: I1201 07:28:19.684385 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerDied","Data":"9f6af426575a83107158be6c5915461d388f641893056af8705d97507d209c8b"} Dec 01 07:28:19 crc kubenswrapper[4632]: I1201 07:28:19.684458 4632 scope.go:117] "RemoveContainer" containerID="d699f875620474d295124914cdddb330070d3a3aa29f909e20f0da3707a4b92b" Dec 01 07:28:20 crc kubenswrapper[4632]: I1201 07:28:20.695143 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerStarted","Data":"99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689"} Dec 01 07:28:31 crc kubenswrapper[4632]: I1201 07:28:31.801144 4632 generic.go:334] "Generic (PLEG): container finished" podID="713aa788-d673-4113-93f4-760c3d3714cc" containerID="6b128aa041833ad6e08678378ca455f37f6e0b192318c6b29d7015ce8d489333" exitCode=0 Dec 01 07:28:31 crc kubenswrapper[4632]: I1201 07:28:31.801235 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"713aa788-d673-4113-93f4-760c3d3714cc","Type":"ContainerDied","Data":"6b128aa041833ad6e08678378ca455f37f6e0b192318c6b29d7015ce8d489333"} Dec 01 07:28:33 
crc kubenswrapper[4632]: I1201 07:28:33.149587 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.328094 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/713aa788-d673-4113-93f4-760c3d3714cc-openstack-config\") pod \"713aa788-d673-4113-93f4-760c3d3714cc\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.328198 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/713aa788-d673-4113-93f4-760c3d3714cc-config-data\") pod \"713aa788-d673-4113-93f4-760c3d3714cc\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.328288 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"713aa788-d673-4113-93f4-760c3d3714cc\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.328481 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/713aa788-d673-4113-93f4-760c3d3714cc-openstack-config-secret\") pod \"713aa788-d673-4113-93f4-760c3d3714cc\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.328591 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/713aa788-d673-4113-93f4-760c3d3714cc-ssh-key\") pod \"713aa788-d673-4113-93f4-760c3d3714cc\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.328698 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b5slj\" (UniqueName: \"kubernetes.io/projected/713aa788-d673-4113-93f4-760c3d3714cc-kube-api-access-b5slj\") pod \"713aa788-d673-4113-93f4-760c3d3714cc\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.328725 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/713aa788-d673-4113-93f4-760c3d3714cc-ca-certs\") pod \"713aa788-d673-4113-93f4-760c3d3714cc\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.328751 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/713aa788-d673-4113-93f4-760c3d3714cc-test-operator-ephemeral-temporary\") pod \"713aa788-d673-4113-93f4-760c3d3714cc\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.328804 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/713aa788-d673-4113-93f4-760c3d3714cc-test-operator-ephemeral-workdir\") pod \"713aa788-d673-4113-93f4-760c3d3714cc\" (UID: \"713aa788-d673-4113-93f4-760c3d3714cc\") " Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.328971 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/713aa788-d673-4113-93f4-760c3d3714cc-config-data" (OuterVolumeSpecName: "config-data") pod "713aa788-d673-4113-93f4-760c3d3714cc" (UID: "713aa788-d673-4113-93f4-760c3d3714cc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.329386 4632 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/713aa788-d673-4113-93f4-760c3d3714cc-config-data\") on node \"crc\" DevicePath \"\"" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.330242 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/713aa788-d673-4113-93f4-760c3d3714cc-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "713aa788-d673-4113-93f4-760c3d3714cc" (UID: "713aa788-d673-4113-93f4-760c3d3714cc"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.335158 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/713aa788-d673-4113-93f4-760c3d3714cc-kube-api-access-b5slj" (OuterVolumeSpecName: "kube-api-access-b5slj") pod "713aa788-d673-4113-93f4-760c3d3714cc" (UID: "713aa788-d673-4113-93f4-760c3d3714cc"). InnerVolumeSpecName "kube-api-access-b5slj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.337028 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/713aa788-d673-4113-93f4-760c3d3714cc-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "713aa788-d673-4113-93f4-760c3d3714cc" (UID: "713aa788-d673-4113-93f4-760c3d3714cc"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.337617 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "test-operator-logs") pod "713aa788-d673-4113-93f4-760c3d3714cc" (UID: "713aa788-d673-4113-93f4-760c3d3714cc"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.353898 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/713aa788-d673-4113-93f4-760c3d3714cc-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "713aa788-d673-4113-93f4-760c3d3714cc" (UID: "713aa788-d673-4113-93f4-760c3d3714cc"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.357755 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/713aa788-d673-4113-93f4-760c3d3714cc-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "713aa788-d673-4113-93f4-760c3d3714cc" (UID: "713aa788-d673-4113-93f4-760c3d3714cc"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.360684 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/713aa788-d673-4113-93f4-760c3d3714cc-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "713aa788-d673-4113-93f4-760c3d3714cc" (UID: "713aa788-d673-4113-93f4-760c3d3714cc"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.368010 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/713aa788-d673-4113-93f4-760c3d3714cc-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "713aa788-d673-4113-93f4-760c3d3714cc" (UID: "713aa788-d673-4113-93f4-760c3d3714cc"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.431064 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b5slj\" (UniqueName: \"kubernetes.io/projected/713aa788-d673-4113-93f4-760c3d3714cc-kube-api-access-b5slj\") on node \"crc\" DevicePath \"\"" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.431096 4632 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/713aa788-d673-4113-93f4-760c3d3714cc-ca-certs\") on node \"crc\" DevicePath \"\"" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.431105 4632 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/713aa788-d673-4113-93f4-760c3d3714cc-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.431115 4632 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/713aa788-d673-4113-93f4-760c3d3714cc-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.431125 4632 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/713aa788-d673-4113-93f4-760c3d3714cc-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.431153 4632 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.431161 4632 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/713aa788-d673-4113-93f4-760c3d3714cc-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.431171 4632 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/713aa788-d673-4113-93f4-760c3d3714cc-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.445527 4632 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.532828 4632 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.745684 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-c6gjx"] Dec 01 07:28:33 crc kubenswrapper[4632]: E1201 07:28:33.746041 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad541747-dff5-46e8-afbb-de073004ab7d" containerName="extract-utilities" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.746059 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad541747-dff5-46e8-afbb-de073004ab7d" containerName="extract-utilities" Dec 01 07:28:33 crc kubenswrapper[4632]: E1201 07:28:33.746084 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c746b5ce-886f-47b9-8efb-7222aeb9f04d" containerName="extract-content" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.746092 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="c746b5ce-886f-47b9-8efb-7222aeb9f04d" containerName="extract-content" Dec 01 07:28:33 crc kubenswrapper[4632]: E1201 07:28:33.746104 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad541747-dff5-46e8-afbb-de073004ab7d" containerName="registry-server" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.746109 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad541747-dff5-46e8-afbb-de073004ab7d" containerName="registry-server" Dec 01 07:28:33 crc kubenswrapper[4632]: E1201 07:28:33.746121 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="713aa788-d673-4113-93f4-760c3d3714cc" containerName="tempest-tests-tempest-tests-runner" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.746127 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="713aa788-d673-4113-93f4-760c3d3714cc" containerName="tempest-tests-tempest-tests-runner" Dec 01 07:28:33 crc kubenswrapper[4632]: E1201 07:28:33.746147 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad541747-dff5-46e8-afbb-de073004ab7d" containerName="extract-content" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.746152 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad541747-dff5-46e8-afbb-de073004ab7d" containerName="extract-content" Dec 01 07:28:33 crc kubenswrapper[4632]: E1201 07:28:33.746161 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c746b5ce-886f-47b9-8efb-7222aeb9f04d" containerName="extract-utilities" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.746165 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="c746b5ce-886f-47b9-8efb-7222aeb9f04d" containerName="extract-utilities" Dec 01 07:28:33 crc kubenswrapper[4632]: E1201 07:28:33.746181 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c746b5ce-886f-47b9-8efb-7222aeb9f04d" containerName="registry-server" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.746187 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="c746b5ce-886f-47b9-8efb-7222aeb9f04d" containerName="registry-server" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.746339 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad541747-dff5-46e8-afbb-de073004ab7d" containerName="registry-server" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.746372 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="713aa788-d673-4113-93f4-760c3d3714cc" containerName="tempest-tests-tempest-tests-runner" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 
07:28:33.746381 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="c746b5ce-886f-47b9-8efb-7222aeb9f04d" containerName="registry-server" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.747464 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c6gjx" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.758741 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-c6gjx"] Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.823783 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"713aa788-d673-4113-93f4-760c3d3714cc","Type":"ContainerDied","Data":"65b1ecdbb1595eeccbaee7d3195b88b05ea241b60b897239efc72c725755c180"} Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.823824 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="65b1ecdbb1595eeccbaee7d3195b88b05ea241b60b897239efc72c725755c180" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.823929 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.840293 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bee9803f-e1f2-43dd-90bc-2ba3d35504c0-catalog-content\") pod \"redhat-marketplace-c6gjx\" (UID: \"bee9803f-e1f2-43dd-90bc-2ba3d35504c0\") " pod="openshift-marketplace/redhat-marketplace-c6gjx" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.840343 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8qp4\" (UniqueName: \"kubernetes.io/projected/bee9803f-e1f2-43dd-90bc-2ba3d35504c0-kube-api-access-q8qp4\") pod \"redhat-marketplace-c6gjx\" (UID: \"bee9803f-e1f2-43dd-90bc-2ba3d35504c0\") " pod="openshift-marketplace/redhat-marketplace-c6gjx" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.841039 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bee9803f-e1f2-43dd-90bc-2ba3d35504c0-utilities\") pod \"redhat-marketplace-c6gjx\" (UID: \"bee9803f-e1f2-43dd-90bc-2ba3d35504c0\") " pod="openshift-marketplace/redhat-marketplace-c6gjx" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.944001 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bee9803f-e1f2-43dd-90bc-2ba3d35504c0-catalog-content\") pod \"redhat-marketplace-c6gjx\" (UID: \"bee9803f-e1f2-43dd-90bc-2ba3d35504c0\") " pod="openshift-marketplace/redhat-marketplace-c6gjx" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.944071 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8qp4\" (UniqueName: \"kubernetes.io/projected/bee9803f-e1f2-43dd-90bc-2ba3d35504c0-kube-api-access-q8qp4\") pod \"redhat-marketplace-c6gjx\" (UID: \"bee9803f-e1f2-43dd-90bc-2ba3d35504c0\") " pod="openshift-marketplace/redhat-marketplace-c6gjx" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.944415 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bee9803f-e1f2-43dd-90bc-2ba3d35504c0-utilities\") pod \"redhat-marketplace-c6gjx\" 
(UID: \"bee9803f-e1f2-43dd-90bc-2ba3d35504c0\") " pod="openshift-marketplace/redhat-marketplace-c6gjx" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.944814 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bee9803f-e1f2-43dd-90bc-2ba3d35504c0-utilities\") pod \"redhat-marketplace-c6gjx\" (UID: \"bee9803f-e1f2-43dd-90bc-2ba3d35504c0\") " pod="openshift-marketplace/redhat-marketplace-c6gjx" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.945112 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bee9803f-e1f2-43dd-90bc-2ba3d35504c0-catalog-content\") pod \"redhat-marketplace-c6gjx\" (UID: \"bee9803f-e1f2-43dd-90bc-2ba3d35504c0\") " pod="openshift-marketplace/redhat-marketplace-c6gjx" Dec 01 07:28:33 crc kubenswrapper[4632]: I1201 07:28:33.963681 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8qp4\" (UniqueName: \"kubernetes.io/projected/bee9803f-e1f2-43dd-90bc-2ba3d35504c0-kube-api-access-q8qp4\") pod \"redhat-marketplace-c6gjx\" (UID: \"bee9803f-e1f2-43dd-90bc-2ba3d35504c0\") " pod="openshift-marketplace/redhat-marketplace-c6gjx" Dec 01 07:28:34 crc kubenswrapper[4632]: I1201 07:28:34.062142 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c6gjx" Dec 01 07:28:34 crc kubenswrapper[4632]: I1201 07:28:34.474629 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-c6gjx"] Dec 01 07:28:34 crc kubenswrapper[4632]: W1201 07:28:34.475331 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbee9803f_e1f2_43dd_90bc_2ba3d35504c0.slice/crio-bbabb7f28ec8dcca1f912effc97d609fa1859ef9c1d1cdd2ae883671e892ce08 WatchSource:0}: Error finding container bbabb7f28ec8dcca1f912effc97d609fa1859ef9c1d1cdd2ae883671e892ce08: Status 404 returned error can't find the container with id bbabb7f28ec8dcca1f912effc97d609fa1859ef9c1d1cdd2ae883671e892ce08 Dec 01 07:28:34 crc kubenswrapper[4632]: I1201 07:28:34.836401 4632 generic.go:334] "Generic (PLEG): container finished" podID="bee9803f-e1f2-43dd-90bc-2ba3d35504c0" containerID="df2a77341dcac8e3a2f556d3095be4adfbccf7778488d6e15206594203ce6824" exitCode=0 Dec 01 07:28:34 crc kubenswrapper[4632]: I1201 07:28:34.836454 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c6gjx" event={"ID":"bee9803f-e1f2-43dd-90bc-2ba3d35504c0","Type":"ContainerDied","Data":"df2a77341dcac8e3a2f556d3095be4adfbccf7778488d6e15206594203ce6824"} Dec 01 07:28:34 crc kubenswrapper[4632]: I1201 07:28:34.836711 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c6gjx" event={"ID":"bee9803f-e1f2-43dd-90bc-2ba3d35504c0","Type":"ContainerStarted","Data":"bbabb7f28ec8dcca1f912effc97d609fa1859ef9c1d1cdd2ae883671e892ce08"} Dec 01 07:28:34 crc kubenswrapper[4632]: I1201 07:28:34.839398 4632 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 07:28:35 crc kubenswrapper[4632]: I1201 07:28:35.855508 4632 generic.go:334] "Generic (PLEG): container finished" podID="bee9803f-e1f2-43dd-90bc-2ba3d35504c0" containerID="43d792ba423ccb8d53705eb140b6d8b5bdd3fa8231b76904e68d45fe1160a9a6" exitCode=0 Dec 01 07:28:35 crc kubenswrapper[4632]: I1201 07:28:35.855779 4632 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c6gjx" event={"ID":"bee9803f-e1f2-43dd-90bc-2ba3d35504c0","Type":"ContainerDied","Data":"43d792ba423ccb8d53705eb140b6d8b5bdd3fa8231b76904e68d45fe1160a9a6"} Dec 01 07:28:36 crc kubenswrapper[4632]: I1201 07:28:36.872118 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c6gjx" event={"ID":"bee9803f-e1f2-43dd-90bc-2ba3d35504c0","Type":"ContainerStarted","Data":"1a1755588ed79d02600f1a34b26b423fd04124562c8077d622a254e4b776eeb9"} Dec 01 07:28:36 crc kubenswrapper[4632]: I1201 07:28:36.901601 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-c6gjx" podStartSLOduration=2.161206918 podStartE2EDuration="3.901569444s" podCreationTimestamp="2025-12-01 07:28:33 +0000 UTC" firstStartedPulling="2025-12-01 07:28:34.839092001 +0000 UTC m=+2724.404104964" lastFinishedPulling="2025-12-01 07:28:36.579454517 +0000 UTC m=+2726.144467490" observedRunningTime="2025-12-01 07:28:36.889876791 +0000 UTC m=+2726.454889763" watchObservedRunningTime="2025-12-01 07:28:36.901569444 +0000 UTC m=+2726.466582417" Dec 01 07:28:43 crc kubenswrapper[4632]: I1201 07:28:43.715918 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 01 07:28:43 crc kubenswrapper[4632]: I1201 07:28:43.718333 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 07:28:43 crc kubenswrapper[4632]: I1201 07:28:43.720419 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-xhq49" Dec 01 07:28:43 crc kubenswrapper[4632]: I1201 07:28:43.721893 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 01 07:28:43 crc kubenswrapper[4632]: I1201 07:28:43.844516 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hczw\" (UniqueName: \"kubernetes.io/projected/b7e83dda-e4f1-4a62-b293-730dcafe5d39-kube-api-access-5hczw\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"b7e83dda-e4f1-4a62-b293-730dcafe5d39\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 07:28:43 crc kubenswrapper[4632]: I1201 07:28:43.844835 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"b7e83dda-e4f1-4a62-b293-730dcafe5d39\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 07:28:43 crc kubenswrapper[4632]: I1201 07:28:43.946489 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hczw\" (UniqueName: \"kubernetes.io/projected/b7e83dda-e4f1-4a62-b293-730dcafe5d39-kube-api-access-5hczw\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"b7e83dda-e4f1-4a62-b293-730dcafe5d39\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 07:28:43 crc kubenswrapper[4632]: I1201 07:28:43.946548 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod 
\"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"b7e83dda-e4f1-4a62-b293-730dcafe5d39\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 07:28:43 crc kubenswrapper[4632]: I1201 07:28:43.946914 4632 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"b7e83dda-e4f1-4a62-b293-730dcafe5d39\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 07:28:43 crc kubenswrapper[4632]: I1201 07:28:43.964954 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hczw\" (UniqueName: \"kubernetes.io/projected/b7e83dda-e4f1-4a62-b293-730dcafe5d39-kube-api-access-5hczw\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"b7e83dda-e4f1-4a62-b293-730dcafe5d39\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 07:28:43 crc kubenswrapper[4632]: I1201 07:28:43.969806 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"b7e83dda-e4f1-4a62-b293-730dcafe5d39\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 07:28:44 crc kubenswrapper[4632]: I1201 07:28:44.040021 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 01 07:28:44 crc kubenswrapper[4632]: I1201 07:28:44.063126 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-c6gjx" Dec 01 07:28:44 crc kubenswrapper[4632]: I1201 07:28:44.063183 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-c6gjx" Dec 01 07:28:44 crc kubenswrapper[4632]: I1201 07:28:44.118811 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-c6gjx" Dec 01 07:28:44 crc kubenswrapper[4632]: I1201 07:28:44.441345 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 01 07:28:44 crc kubenswrapper[4632]: I1201 07:28:44.948788 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"b7e83dda-e4f1-4a62-b293-730dcafe5d39","Type":"ContainerStarted","Data":"62856bfdd2384213cf30c97bb1103c5bac05ec1ec05f595072588deb9a7c8770"} Dec 01 07:28:44 crc kubenswrapper[4632]: I1201 07:28:44.982547 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-c6gjx" Dec 01 07:28:45 crc kubenswrapper[4632]: I1201 07:28:45.031454 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-c6gjx"] Dec 01 07:28:45 crc kubenswrapper[4632]: I1201 07:28:45.958266 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"b7e83dda-e4f1-4a62-b293-730dcafe5d39","Type":"ContainerStarted","Data":"2b2c926a1efbc31d304db668ba6906fb78b4a829e7b01e1ff1d84c9129d28861"} Dec 01 07:28:46 crc kubenswrapper[4632]: I1201 07:28:46.967283 4632 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-c6gjx" podUID="bee9803f-e1f2-43dd-90bc-2ba3d35504c0" containerName="registry-server" containerID="cri-o://1a1755588ed79d02600f1a34b26b423fd04124562c8077d622a254e4b776eeb9" gracePeriod=2 Dec 01 07:28:47 crc kubenswrapper[4632]: I1201 07:28:47.386905 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c6gjx" Dec 01 07:28:47 crc kubenswrapper[4632]: I1201 07:28:47.408194 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=3.497583061 podStartE2EDuration="4.408174917s" podCreationTimestamp="2025-12-01 07:28:43 +0000 UTC" firstStartedPulling="2025-12-01 07:28:44.445249815 +0000 UTC m=+2734.010262788" lastFinishedPulling="2025-12-01 07:28:45.355841672 +0000 UTC m=+2734.920854644" observedRunningTime="2025-12-01 07:28:45.971681604 +0000 UTC m=+2735.536694577" watchObservedRunningTime="2025-12-01 07:28:47.408174917 +0000 UTC m=+2736.973187890" Dec 01 07:28:47 crc kubenswrapper[4632]: I1201 07:28:47.533159 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bee9803f-e1f2-43dd-90bc-2ba3d35504c0-utilities\") pod \"bee9803f-e1f2-43dd-90bc-2ba3d35504c0\" (UID: \"bee9803f-e1f2-43dd-90bc-2ba3d35504c0\") " Dec 01 07:28:47 crc kubenswrapper[4632]: I1201 07:28:47.533274 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8qp4\" (UniqueName: \"kubernetes.io/projected/bee9803f-e1f2-43dd-90bc-2ba3d35504c0-kube-api-access-q8qp4\") pod \"bee9803f-e1f2-43dd-90bc-2ba3d35504c0\" (UID: \"bee9803f-e1f2-43dd-90bc-2ba3d35504c0\") " Dec 01 07:28:47 crc kubenswrapper[4632]: I1201 07:28:47.533637 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bee9803f-e1f2-43dd-90bc-2ba3d35504c0-catalog-content\") pod \"bee9803f-e1f2-43dd-90bc-2ba3d35504c0\" (UID: \"bee9803f-e1f2-43dd-90bc-2ba3d35504c0\") " Dec 01 07:28:47 crc kubenswrapper[4632]: I1201 07:28:47.534712 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bee9803f-e1f2-43dd-90bc-2ba3d35504c0-utilities" (OuterVolumeSpecName: "utilities") pod "bee9803f-e1f2-43dd-90bc-2ba3d35504c0" (UID: "bee9803f-e1f2-43dd-90bc-2ba3d35504c0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:28:47 crc kubenswrapper[4632]: I1201 07:28:47.538866 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bee9803f-e1f2-43dd-90bc-2ba3d35504c0-kube-api-access-q8qp4" (OuterVolumeSpecName: "kube-api-access-q8qp4") pod "bee9803f-e1f2-43dd-90bc-2ba3d35504c0" (UID: "bee9803f-e1f2-43dd-90bc-2ba3d35504c0"). InnerVolumeSpecName "kube-api-access-q8qp4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:28:47 crc kubenswrapper[4632]: I1201 07:28:47.552267 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bee9803f-e1f2-43dd-90bc-2ba3d35504c0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bee9803f-e1f2-43dd-90bc-2ba3d35504c0" (UID: "bee9803f-e1f2-43dd-90bc-2ba3d35504c0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:28:47 crc kubenswrapper[4632]: I1201 07:28:47.636624 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bee9803f-e1f2-43dd-90bc-2ba3d35504c0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:28:47 crc kubenswrapper[4632]: I1201 07:28:47.636656 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bee9803f-e1f2-43dd-90bc-2ba3d35504c0-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:28:47 crc kubenswrapper[4632]: I1201 07:28:47.636667 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8qp4\" (UniqueName: \"kubernetes.io/projected/bee9803f-e1f2-43dd-90bc-2ba3d35504c0-kube-api-access-q8qp4\") on node \"crc\" DevicePath \"\"" Dec 01 07:28:47 crc kubenswrapper[4632]: I1201 07:28:47.977477 4632 generic.go:334] "Generic (PLEG): container finished" podID="bee9803f-e1f2-43dd-90bc-2ba3d35504c0" containerID="1a1755588ed79d02600f1a34b26b423fd04124562c8077d622a254e4b776eeb9" exitCode=0 Dec 01 07:28:47 crc kubenswrapper[4632]: I1201 07:28:47.977530 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c6gjx" event={"ID":"bee9803f-e1f2-43dd-90bc-2ba3d35504c0","Type":"ContainerDied","Data":"1a1755588ed79d02600f1a34b26b423fd04124562c8077d622a254e4b776eeb9"} Dec 01 07:28:47 crc kubenswrapper[4632]: I1201 07:28:47.977573 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c6gjx" event={"ID":"bee9803f-e1f2-43dd-90bc-2ba3d35504c0","Type":"ContainerDied","Data":"bbabb7f28ec8dcca1f912effc97d609fa1859ef9c1d1cdd2ae883671e892ce08"} Dec 01 07:28:47 crc kubenswrapper[4632]: I1201 07:28:47.977587 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c6gjx" Dec 01 07:28:47 crc kubenswrapper[4632]: I1201 07:28:47.977594 4632 scope.go:117] "RemoveContainer" containerID="1a1755588ed79d02600f1a34b26b423fd04124562c8077d622a254e4b776eeb9" Dec 01 07:28:47 crc kubenswrapper[4632]: I1201 07:28:47.994777 4632 scope.go:117] "RemoveContainer" containerID="43d792ba423ccb8d53705eb140b6d8b5bdd3fa8231b76904e68d45fe1160a9a6" Dec 01 07:28:48 crc kubenswrapper[4632]: I1201 07:28:48.008037 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-c6gjx"] Dec 01 07:28:48 crc kubenswrapper[4632]: I1201 07:28:48.013290 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-c6gjx"] Dec 01 07:28:48 crc kubenswrapper[4632]: I1201 07:28:48.027179 4632 scope.go:117] "RemoveContainer" containerID="df2a77341dcac8e3a2f556d3095be4adfbccf7778488d6e15206594203ce6824" Dec 01 07:28:48 crc kubenswrapper[4632]: I1201 07:28:48.044748 4632 scope.go:117] "RemoveContainer" containerID="1a1755588ed79d02600f1a34b26b423fd04124562c8077d622a254e4b776eeb9" Dec 01 07:28:48 crc kubenswrapper[4632]: E1201 07:28:48.045044 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a1755588ed79d02600f1a34b26b423fd04124562c8077d622a254e4b776eeb9\": container with ID starting with 1a1755588ed79d02600f1a34b26b423fd04124562c8077d622a254e4b776eeb9 not found: ID does not exist" containerID="1a1755588ed79d02600f1a34b26b423fd04124562c8077d622a254e4b776eeb9" Dec 01 07:28:48 crc kubenswrapper[4632]: I1201 07:28:48.045076 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a1755588ed79d02600f1a34b26b423fd04124562c8077d622a254e4b776eeb9"} err="failed to get container status \"1a1755588ed79d02600f1a34b26b423fd04124562c8077d622a254e4b776eeb9\": rpc error: code = NotFound desc = could not find container \"1a1755588ed79d02600f1a34b26b423fd04124562c8077d622a254e4b776eeb9\": container with ID starting with 1a1755588ed79d02600f1a34b26b423fd04124562c8077d622a254e4b776eeb9 not found: ID does not exist" Dec 01 07:28:48 crc kubenswrapper[4632]: I1201 07:28:48.045106 4632 scope.go:117] "RemoveContainer" containerID="43d792ba423ccb8d53705eb140b6d8b5bdd3fa8231b76904e68d45fe1160a9a6" Dec 01 07:28:48 crc kubenswrapper[4632]: E1201 07:28:48.045379 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43d792ba423ccb8d53705eb140b6d8b5bdd3fa8231b76904e68d45fe1160a9a6\": container with ID starting with 43d792ba423ccb8d53705eb140b6d8b5bdd3fa8231b76904e68d45fe1160a9a6 not found: ID does not exist" containerID="43d792ba423ccb8d53705eb140b6d8b5bdd3fa8231b76904e68d45fe1160a9a6" Dec 01 07:28:48 crc kubenswrapper[4632]: I1201 07:28:48.045400 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43d792ba423ccb8d53705eb140b6d8b5bdd3fa8231b76904e68d45fe1160a9a6"} err="failed to get container status \"43d792ba423ccb8d53705eb140b6d8b5bdd3fa8231b76904e68d45fe1160a9a6\": rpc error: code = NotFound desc = could not find container \"43d792ba423ccb8d53705eb140b6d8b5bdd3fa8231b76904e68d45fe1160a9a6\": container with ID starting with 43d792ba423ccb8d53705eb140b6d8b5bdd3fa8231b76904e68d45fe1160a9a6 not found: ID does not exist" Dec 01 07:28:48 crc kubenswrapper[4632]: I1201 07:28:48.045417 4632 scope.go:117] "RemoveContainer" 
containerID="df2a77341dcac8e3a2f556d3095be4adfbccf7778488d6e15206594203ce6824" Dec 01 07:28:48 crc kubenswrapper[4632]: E1201 07:28:48.045635 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df2a77341dcac8e3a2f556d3095be4adfbccf7778488d6e15206594203ce6824\": container with ID starting with df2a77341dcac8e3a2f556d3095be4adfbccf7778488d6e15206594203ce6824 not found: ID does not exist" containerID="df2a77341dcac8e3a2f556d3095be4adfbccf7778488d6e15206594203ce6824" Dec 01 07:28:48 crc kubenswrapper[4632]: I1201 07:28:48.045667 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df2a77341dcac8e3a2f556d3095be4adfbccf7778488d6e15206594203ce6824"} err="failed to get container status \"df2a77341dcac8e3a2f556d3095be4adfbccf7778488d6e15206594203ce6824\": rpc error: code = NotFound desc = could not find container \"df2a77341dcac8e3a2f556d3095be4adfbccf7778488d6e15206594203ce6824\": container with ID starting with df2a77341dcac8e3a2f556d3095be4adfbccf7778488d6e15206594203ce6824 not found: ID does not exist" Dec 01 07:28:48 crc kubenswrapper[4632]: I1201 07:28:48.770927 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bee9803f-e1f2-43dd-90bc-2ba3d35504c0" path="/var/lib/kubelet/pods/bee9803f-e1f2-43dd-90bc-2ba3d35504c0/volumes" Dec 01 07:29:03 crc kubenswrapper[4632]: I1201 07:29:03.954410 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-bth4h/must-gather-wtlhd"] Dec 01 07:29:03 crc kubenswrapper[4632]: E1201 07:29:03.956505 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bee9803f-e1f2-43dd-90bc-2ba3d35504c0" containerName="registry-server" Dec 01 07:29:03 crc kubenswrapper[4632]: I1201 07:29:03.956632 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="bee9803f-e1f2-43dd-90bc-2ba3d35504c0" containerName="registry-server" Dec 01 07:29:03 crc kubenswrapper[4632]: E1201 07:29:03.956719 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bee9803f-e1f2-43dd-90bc-2ba3d35504c0" containerName="extract-utilities" Dec 01 07:29:03 crc kubenswrapper[4632]: I1201 07:29:03.956778 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="bee9803f-e1f2-43dd-90bc-2ba3d35504c0" containerName="extract-utilities" Dec 01 07:29:03 crc kubenswrapper[4632]: E1201 07:29:03.956869 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bee9803f-e1f2-43dd-90bc-2ba3d35504c0" containerName="extract-content" Dec 01 07:29:03 crc kubenswrapper[4632]: I1201 07:29:03.956936 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="bee9803f-e1f2-43dd-90bc-2ba3d35504c0" containerName="extract-content" Dec 01 07:29:03 crc kubenswrapper[4632]: I1201 07:29:03.957219 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="bee9803f-e1f2-43dd-90bc-2ba3d35504c0" containerName="registry-server" Dec 01 07:29:03 crc kubenswrapper[4632]: I1201 07:29:03.958391 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-bth4h/must-gather-wtlhd" Dec 01 07:29:03 crc kubenswrapper[4632]: I1201 07:29:03.961734 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-bth4h"/"openshift-service-ca.crt" Dec 01 07:29:03 crc kubenswrapper[4632]: I1201 07:29:03.962053 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-bth4h"/"kube-root-ca.crt" Dec 01 07:29:03 crc kubenswrapper[4632]: I1201 07:29:03.980394 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-bth4h/must-gather-wtlhd"] Dec 01 07:29:04 crc kubenswrapper[4632]: I1201 07:29:04.141052 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwjwz\" (UniqueName: \"kubernetes.io/projected/51b93b9c-d69c-4fde-9d72-e906a394467e-kube-api-access-zwjwz\") pod \"must-gather-wtlhd\" (UID: \"51b93b9c-d69c-4fde-9d72-e906a394467e\") " pod="openshift-must-gather-bth4h/must-gather-wtlhd" Dec 01 07:29:04 crc kubenswrapper[4632]: I1201 07:29:04.141128 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/51b93b9c-d69c-4fde-9d72-e906a394467e-must-gather-output\") pod \"must-gather-wtlhd\" (UID: \"51b93b9c-d69c-4fde-9d72-e906a394467e\") " pod="openshift-must-gather-bth4h/must-gather-wtlhd" Dec 01 07:29:04 crc kubenswrapper[4632]: I1201 07:29:04.244551 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/51b93b9c-d69c-4fde-9d72-e906a394467e-must-gather-output\") pod \"must-gather-wtlhd\" (UID: \"51b93b9c-d69c-4fde-9d72-e906a394467e\") " pod="openshift-must-gather-bth4h/must-gather-wtlhd" Dec 01 07:29:04 crc kubenswrapper[4632]: I1201 07:29:04.244787 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwjwz\" (UniqueName: \"kubernetes.io/projected/51b93b9c-d69c-4fde-9d72-e906a394467e-kube-api-access-zwjwz\") pod \"must-gather-wtlhd\" (UID: \"51b93b9c-d69c-4fde-9d72-e906a394467e\") " pod="openshift-must-gather-bth4h/must-gather-wtlhd" Dec 01 07:29:04 crc kubenswrapper[4632]: I1201 07:29:04.245148 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/51b93b9c-d69c-4fde-9d72-e906a394467e-must-gather-output\") pod \"must-gather-wtlhd\" (UID: \"51b93b9c-d69c-4fde-9d72-e906a394467e\") " pod="openshift-must-gather-bth4h/must-gather-wtlhd" Dec 01 07:29:04 crc kubenswrapper[4632]: I1201 07:29:04.266924 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwjwz\" (UniqueName: \"kubernetes.io/projected/51b93b9c-d69c-4fde-9d72-e906a394467e-kube-api-access-zwjwz\") pod \"must-gather-wtlhd\" (UID: \"51b93b9c-d69c-4fde-9d72-e906a394467e\") " pod="openshift-must-gather-bth4h/must-gather-wtlhd" Dec 01 07:29:04 crc kubenswrapper[4632]: I1201 07:29:04.284019 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-bth4h/must-gather-wtlhd" Dec 01 07:29:04 crc kubenswrapper[4632]: W1201 07:29:04.726520 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod51b93b9c_d69c_4fde_9d72_e906a394467e.slice/crio-ed0212ec00555de373695f477869d7935e7b70ef3dd9885e61b6109304a868df WatchSource:0}: Error finding container ed0212ec00555de373695f477869d7935e7b70ef3dd9885e61b6109304a868df: Status 404 returned error can't find the container with id ed0212ec00555de373695f477869d7935e7b70ef3dd9885e61b6109304a868df Dec 01 07:29:04 crc kubenswrapper[4632]: I1201 07:29:04.728001 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-bth4h/must-gather-wtlhd"] Dec 01 07:29:05 crc kubenswrapper[4632]: I1201 07:29:05.111117 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bth4h/must-gather-wtlhd" event={"ID":"51b93b9c-d69c-4fde-9d72-e906a394467e","Type":"ContainerStarted","Data":"ed0212ec00555de373695f477869d7935e7b70ef3dd9885e61b6109304a868df"} Dec 01 07:29:09 crc kubenswrapper[4632]: I1201 07:29:09.152208 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bth4h/must-gather-wtlhd" event={"ID":"51b93b9c-d69c-4fde-9d72-e906a394467e","Type":"ContainerStarted","Data":"af6ba0caf98df9f96e49fd377f12952a4e70524db084e762f55fdfe2421d78a0"} Dec 01 07:29:09 crc kubenswrapper[4632]: I1201 07:29:09.152776 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bth4h/must-gather-wtlhd" event={"ID":"51b93b9c-d69c-4fde-9d72-e906a394467e","Type":"ContainerStarted","Data":"b8beffa25ca955d870471bfd73af074d225240c91724cc6677e65c6a85d1b2eb"} Dec 01 07:29:09 crc kubenswrapper[4632]: I1201 07:29:09.169505 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-bth4h/must-gather-wtlhd" podStartSLOduration=2.654260381 podStartE2EDuration="6.169483394s" podCreationTimestamp="2025-12-01 07:29:03 +0000 UTC" firstStartedPulling="2025-12-01 07:29:04.7293065 +0000 UTC m=+2754.294319473" lastFinishedPulling="2025-12-01 07:29:08.244529513 +0000 UTC m=+2757.809542486" observedRunningTime="2025-12-01 07:29:09.166046426 +0000 UTC m=+2758.731059400" watchObservedRunningTime="2025-12-01 07:29:09.169483394 +0000 UTC m=+2758.734496366" Dec 01 07:29:11 crc kubenswrapper[4632]: I1201 07:29:11.371331 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-bth4h/crc-debug-p5lrr"] Dec 01 07:29:11 crc kubenswrapper[4632]: I1201 07:29:11.372709 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-bth4h/crc-debug-p5lrr" Dec 01 07:29:11 crc kubenswrapper[4632]: I1201 07:29:11.374426 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-bth4h"/"default-dockercfg-768jk" Dec 01 07:29:11 crc kubenswrapper[4632]: I1201 07:29:11.414839 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/886f3d31-907f-46f9-ae99-ab4cdb293e53-host\") pod \"crc-debug-p5lrr\" (UID: \"886f3d31-907f-46f9-ae99-ab4cdb293e53\") " pod="openshift-must-gather-bth4h/crc-debug-p5lrr" Dec 01 07:29:11 crc kubenswrapper[4632]: I1201 07:29:11.415074 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nn9rg\" (UniqueName: \"kubernetes.io/projected/886f3d31-907f-46f9-ae99-ab4cdb293e53-kube-api-access-nn9rg\") pod \"crc-debug-p5lrr\" (UID: \"886f3d31-907f-46f9-ae99-ab4cdb293e53\") " pod="openshift-must-gather-bth4h/crc-debug-p5lrr" Dec 01 07:29:11 crc kubenswrapper[4632]: I1201 07:29:11.517036 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/886f3d31-907f-46f9-ae99-ab4cdb293e53-host\") pod \"crc-debug-p5lrr\" (UID: \"886f3d31-907f-46f9-ae99-ab4cdb293e53\") " pod="openshift-must-gather-bth4h/crc-debug-p5lrr" Dec 01 07:29:11 crc kubenswrapper[4632]: I1201 07:29:11.517151 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/886f3d31-907f-46f9-ae99-ab4cdb293e53-host\") pod \"crc-debug-p5lrr\" (UID: \"886f3d31-907f-46f9-ae99-ab4cdb293e53\") " pod="openshift-must-gather-bth4h/crc-debug-p5lrr" Dec 01 07:29:11 crc kubenswrapper[4632]: I1201 07:29:11.517213 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nn9rg\" (UniqueName: \"kubernetes.io/projected/886f3d31-907f-46f9-ae99-ab4cdb293e53-kube-api-access-nn9rg\") pod \"crc-debug-p5lrr\" (UID: \"886f3d31-907f-46f9-ae99-ab4cdb293e53\") " pod="openshift-must-gather-bth4h/crc-debug-p5lrr" Dec 01 07:29:11 crc kubenswrapper[4632]: I1201 07:29:11.536990 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nn9rg\" (UniqueName: \"kubernetes.io/projected/886f3d31-907f-46f9-ae99-ab4cdb293e53-kube-api-access-nn9rg\") pod \"crc-debug-p5lrr\" (UID: \"886f3d31-907f-46f9-ae99-ab4cdb293e53\") " pod="openshift-must-gather-bth4h/crc-debug-p5lrr" Dec 01 07:29:11 crc kubenswrapper[4632]: I1201 07:29:11.685780 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-bth4h/crc-debug-p5lrr" Dec 01 07:29:11 crc kubenswrapper[4632]: W1201 07:29:11.716346 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod886f3d31_907f_46f9_ae99_ab4cdb293e53.slice/crio-00cb125c5eafa550590c4e972e4eb658379225a2627725cc0d0b3ee340c6fae6 WatchSource:0}: Error finding container 00cb125c5eafa550590c4e972e4eb658379225a2627725cc0d0b3ee340c6fae6: Status 404 returned error can't find the container with id 00cb125c5eafa550590c4e972e4eb658379225a2627725cc0d0b3ee340c6fae6 Dec 01 07:29:12 crc kubenswrapper[4632]: I1201 07:29:12.179043 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bth4h/crc-debug-p5lrr" event={"ID":"886f3d31-907f-46f9-ae99-ab4cdb293e53","Type":"ContainerStarted","Data":"00cb125c5eafa550590c4e972e4eb658379225a2627725cc0d0b3ee340c6fae6"} Dec 01 07:29:22 crc kubenswrapper[4632]: I1201 07:29:22.265671 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bth4h/crc-debug-p5lrr" event={"ID":"886f3d31-907f-46f9-ae99-ab4cdb293e53","Type":"ContainerStarted","Data":"8117113b8f7693034c7294799c7262827034417853c043d634237d50cb9e4189"} Dec 01 07:29:51 crc kubenswrapper[4632]: I1201 07:29:51.505549 4632 generic.go:334] "Generic (PLEG): container finished" podID="886f3d31-907f-46f9-ae99-ab4cdb293e53" containerID="8117113b8f7693034c7294799c7262827034417853c043d634237d50cb9e4189" exitCode=0 Dec 01 07:29:51 crc kubenswrapper[4632]: I1201 07:29:51.505991 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bth4h/crc-debug-p5lrr" event={"ID":"886f3d31-907f-46f9-ae99-ab4cdb293e53","Type":"ContainerDied","Data":"8117113b8f7693034c7294799c7262827034417853c043d634237d50cb9e4189"} Dec 01 07:29:52 crc kubenswrapper[4632]: I1201 07:29:52.587047 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-bth4h/crc-debug-p5lrr" Dec 01 07:29:52 crc kubenswrapper[4632]: I1201 07:29:52.618225 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-bth4h/crc-debug-p5lrr"] Dec 01 07:29:52 crc kubenswrapper[4632]: I1201 07:29:52.623630 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-bth4h/crc-debug-p5lrr"] Dec 01 07:29:52 crc kubenswrapper[4632]: I1201 07:29:52.639332 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nn9rg\" (UniqueName: \"kubernetes.io/projected/886f3d31-907f-46f9-ae99-ab4cdb293e53-kube-api-access-nn9rg\") pod \"886f3d31-907f-46f9-ae99-ab4cdb293e53\" (UID: \"886f3d31-907f-46f9-ae99-ab4cdb293e53\") " Dec 01 07:29:52 crc kubenswrapper[4632]: I1201 07:29:52.639601 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/886f3d31-907f-46f9-ae99-ab4cdb293e53-host\") pod \"886f3d31-907f-46f9-ae99-ab4cdb293e53\" (UID: \"886f3d31-907f-46f9-ae99-ab4cdb293e53\") " Dec 01 07:29:52 crc kubenswrapper[4632]: I1201 07:29:52.639673 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/886f3d31-907f-46f9-ae99-ab4cdb293e53-host" (OuterVolumeSpecName: "host") pod "886f3d31-907f-46f9-ae99-ab4cdb293e53" (UID: "886f3d31-907f-46f9-ae99-ab4cdb293e53"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:29:52 crc kubenswrapper[4632]: I1201 07:29:52.640112 4632 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/886f3d31-907f-46f9-ae99-ab4cdb293e53-host\") on node \"crc\" DevicePath \"\"" Dec 01 07:29:52 crc kubenswrapper[4632]: I1201 07:29:52.644263 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/886f3d31-907f-46f9-ae99-ab4cdb293e53-kube-api-access-nn9rg" (OuterVolumeSpecName: "kube-api-access-nn9rg") pod "886f3d31-907f-46f9-ae99-ab4cdb293e53" (UID: "886f3d31-907f-46f9-ae99-ab4cdb293e53"). InnerVolumeSpecName "kube-api-access-nn9rg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:29:52 crc kubenswrapper[4632]: I1201 07:29:52.742923 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nn9rg\" (UniqueName: \"kubernetes.io/projected/886f3d31-907f-46f9-ae99-ab4cdb293e53-kube-api-access-nn9rg\") on node \"crc\" DevicePath \"\"" Dec 01 07:29:52 crc kubenswrapper[4632]: I1201 07:29:52.760563 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="886f3d31-907f-46f9-ae99-ab4cdb293e53" path="/var/lib/kubelet/pods/886f3d31-907f-46f9-ae99-ab4cdb293e53/volumes" Dec 01 07:29:53 crc kubenswrapper[4632]: I1201 07:29:53.528863 4632 scope.go:117] "RemoveContainer" containerID="8117113b8f7693034c7294799c7262827034417853c043d634237d50cb9e4189" Dec 01 07:29:53 crc kubenswrapper[4632]: I1201 07:29:53.528887 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-bth4h/crc-debug-p5lrr" Dec 01 07:29:53 crc kubenswrapper[4632]: I1201 07:29:53.786315 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-bth4h/crc-debug-xr5m7"] Dec 01 07:29:53 crc kubenswrapper[4632]: E1201 07:29:53.787120 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="886f3d31-907f-46f9-ae99-ab4cdb293e53" containerName="container-00" Dec 01 07:29:53 crc kubenswrapper[4632]: I1201 07:29:53.787137 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="886f3d31-907f-46f9-ae99-ab4cdb293e53" containerName="container-00" Dec 01 07:29:53 crc kubenswrapper[4632]: I1201 07:29:53.787347 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="886f3d31-907f-46f9-ae99-ab4cdb293e53" containerName="container-00" Dec 01 07:29:53 crc kubenswrapper[4632]: I1201 07:29:53.788074 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-bth4h/crc-debug-xr5m7" Dec 01 07:29:53 crc kubenswrapper[4632]: I1201 07:29:53.789863 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-bth4h"/"default-dockercfg-768jk" Dec 01 07:29:53 crc kubenswrapper[4632]: I1201 07:29:53.963868 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd-host\") pod \"crc-debug-xr5m7\" (UID: \"fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd\") " pod="openshift-must-gather-bth4h/crc-debug-xr5m7" Dec 01 07:29:53 crc kubenswrapper[4632]: I1201 07:29:53.964157 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgpmj\" (UniqueName: \"kubernetes.io/projected/fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd-kube-api-access-sgpmj\") pod \"crc-debug-xr5m7\" (UID: \"fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd\") " pod="openshift-must-gather-bth4h/crc-debug-xr5m7" Dec 01 07:29:54 crc kubenswrapper[4632]: I1201 07:29:54.066279 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd-host\") pod \"crc-debug-xr5m7\" (UID: \"fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd\") " pod="openshift-must-gather-bth4h/crc-debug-xr5m7" Dec 01 07:29:54 crc kubenswrapper[4632]: I1201 07:29:54.066418 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgpmj\" (UniqueName: \"kubernetes.io/projected/fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd-kube-api-access-sgpmj\") pod \"crc-debug-xr5m7\" (UID: \"fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd\") " pod="openshift-must-gather-bth4h/crc-debug-xr5m7" Dec 01 07:29:54 crc kubenswrapper[4632]: I1201 07:29:54.066469 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd-host\") pod \"crc-debug-xr5m7\" (UID: \"fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd\") " pod="openshift-must-gather-bth4h/crc-debug-xr5m7" Dec 01 07:29:54 crc kubenswrapper[4632]: I1201 07:29:54.084004 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgpmj\" (UniqueName: \"kubernetes.io/projected/fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd-kube-api-access-sgpmj\") pod \"crc-debug-xr5m7\" (UID: \"fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd\") " pod="openshift-must-gather-bth4h/crc-debug-xr5m7" Dec 01 07:29:54 crc kubenswrapper[4632]: I1201 07:29:54.102815 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-bth4h/crc-debug-xr5m7" Dec 01 07:29:54 crc kubenswrapper[4632]: I1201 07:29:54.542128 4632 generic.go:334] "Generic (PLEG): container finished" podID="fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd" containerID="66f7c71611d27844b52a567cb80f2eb99d595dd4097188a14435e95454e7dff3" exitCode=0 Dec 01 07:29:54 crc kubenswrapper[4632]: I1201 07:29:54.542209 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bth4h/crc-debug-xr5m7" event={"ID":"fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd","Type":"ContainerDied","Data":"66f7c71611d27844b52a567cb80f2eb99d595dd4097188a14435e95454e7dff3"} Dec 01 07:29:54 crc kubenswrapper[4632]: I1201 07:29:54.542562 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bth4h/crc-debug-xr5m7" event={"ID":"fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd","Type":"ContainerStarted","Data":"c7cfba84a4ed2478658561181476ba2d6da5052a0bee359780a2663aba164767"} Dec 01 07:29:55 crc kubenswrapper[4632]: I1201 07:29:55.010554 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-bth4h/crc-debug-xr5m7"] Dec 01 07:29:55 crc kubenswrapper[4632]: I1201 07:29:55.016015 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-bth4h/crc-debug-xr5m7"] Dec 01 07:29:55 crc kubenswrapper[4632]: I1201 07:29:55.621085 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-bth4h/crc-debug-xr5m7" Dec 01 07:29:55 crc kubenswrapper[4632]: I1201 07:29:55.801272 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgpmj\" (UniqueName: \"kubernetes.io/projected/fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd-kube-api-access-sgpmj\") pod \"fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd\" (UID: \"fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd\") " Dec 01 07:29:55 crc kubenswrapper[4632]: I1201 07:29:55.801521 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd-host\") pod \"fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd\" (UID: \"fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd\") " Dec 01 07:29:55 crc kubenswrapper[4632]: I1201 07:29:55.801651 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd-host" (OuterVolumeSpecName: "host") pod "fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd" (UID: "fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:29:55 crc kubenswrapper[4632]: I1201 07:29:55.801981 4632 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd-host\") on node \"crc\" DevicePath \"\"" Dec 01 07:29:55 crc kubenswrapper[4632]: I1201 07:29:55.806016 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd-kube-api-access-sgpmj" (OuterVolumeSpecName: "kube-api-access-sgpmj") pod "fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd" (UID: "fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd"). InnerVolumeSpecName "kube-api-access-sgpmj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:29:55 crc kubenswrapper[4632]: I1201 07:29:55.904317 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgpmj\" (UniqueName: \"kubernetes.io/projected/fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd-kube-api-access-sgpmj\") on node \"crc\" DevicePath \"\"" Dec 01 07:29:56 crc kubenswrapper[4632]: I1201 07:29:56.149632 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-bth4h/crc-debug-kx2j8"] Dec 01 07:29:56 crc kubenswrapper[4632]: E1201 07:29:56.150128 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd" containerName="container-00" Dec 01 07:29:56 crc kubenswrapper[4632]: I1201 07:29:56.150145 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd" containerName="container-00" Dec 01 07:29:56 crc kubenswrapper[4632]: I1201 07:29:56.150377 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd" containerName="container-00" Dec 01 07:29:56 crc kubenswrapper[4632]: I1201 07:29:56.151115 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-bth4h/crc-debug-kx2j8" Dec 01 07:29:56 crc kubenswrapper[4632]: I1201 07:29:56.210005 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cz9hm\" (UniqueName: \"kubernetes.io/projected/14a5d0f7-ae81-4944-8ea3-49c6b637ce94-kube-api-access-cz9hm\") pod \"crc-debug-kx2j8\" (UID: \"14a5d0f7-ae81-4944-8ea3-49c6b637ce94\") " pod="openshift-must-gather-bth4h/crc-debug-kx2j8" Dec 01 07:29:56 crc kubenswrapper[4632]: I1201 07:29:56.210049 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/14a5d0f7-ae81-4944-8ea3-49c6b637ce94-host\") pod \"crc-debug-kx2j8\" (UID: \"14a5d0f7-ae81-4944-8ea3-49c6b637ce94\") " pod="openshift-must-gather-bth4h/crc-debug-kx2j8" Dec 01 07:29:56 crc kubenswrapper[4632]: I1201 07:29:56.313060 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cz9hm\" (UniqueName: \"kubernetes.io/projected/14a5d0f7-ae81-4944-8ea3-49c6b637ce94-kube-api-access-cz9hm\") pod \"crc-debug-kx2j8\" (UID: \"14a5d0f7-ae81-4944-8ea3-49c6b637ce94\") " pod="openshift-must-gather-bth4h/crc-debug-kx2j8" Dec 01 07:29:56 crc kubenswrapper[4632]: I1201 07:29:56.313109 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/14a5d0f7-ae81-4944-8ea3-49c6b637ce94-host\") pod \"crc-debug-kx2j8\" (UID: \"14a5d0f7-ae81-4944-8ea3-49c6b637ce94\") " pod="openshift-must-gather-bth4h/crc-debug-kx2j8" Dec 01 07:29:56 crc kubenswrapper[4632]: I1201 07:29:56.313298 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/14a5d0f7-ae81-4944-8ea3-49c6b637ce94-host\") pod \"crc-debug-kx2j8\" (UID: \"14a5d0f7-ae81-4944-8ea3-49c6b637ce94\") " pod="openshift-must-gather-bth4h/crc-debug-kx2j8" Dec 01 07:29:56 crc kubenswrapper[4632]: I1201 07:29:56.330585 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cz9hm\" (UniqueName: \"kubernetes.io/projected/14a5d0f7-ae81-4944-8ea3-49c6b637ce94-kube-api-access-cz9hm\") pod \"crc-debug-kx2j8\" (UID: \"14a5d0f7-ae81-4944-8ea3-49c6b637ce94\") " 
pod="openshift-must-gather-bth4h/crc-debug-kx2j8" Dec 01 07:29:56 crc kubenswrapper[4632]: I1201 07:29:56.468702 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-bth4h/crc-debug-kx2j8" Dec 01 07:29:56 crc kubenswrapper[4632]: W1201 07:29:56.497106 4632 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14a5d0f7_ae81_4944_8ea3_49c6b637ce94.slice/crio-58c8c107594c0e2f52139c936e01e8ec9ef4e0d5f4f254d8e700b2165b840491 WatchSource:0}: Error finding container 58c8c107594c0e2f52139c936e01e8ec9ef4e0d5f4f254d8e700b2165b840491: Status 404 returned error can't find the container with id 58c8c107594c0e2f52139c936e01e8ec9ef4e0d5f4f254d8e700b2165b840491 Dec 01 07:29:56 crc kubenswrapper[4632]: I1201 07:29:56.558286 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c7cfba84a4ed2478658561181476ba2d6da5052a0bee359780a2663aba164767" Dec 01 07:29:56 crc kubenswrapper[4632]: I1201 07:29:56.558301 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-bth4h/crc-debug-xr5m7" Dec 01 07:29:56 crc kubenswrapper[4632]: I1201 07:29:56.559370 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bth4h/crc-debug-kx2j8" event={"ID":"14a5d0f7-ae81-4944-8ea3-49c6b637ce94","Type":"ContainerStarted","Data":"58c8c107594c0e2f52139c936e01e8ec9ef4e0d5f4f254d8e700b2165b840491"} Dec 01 07:29:56 crc kubenswrapper[4632]: I1201 07:29:56.758913 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd" path="/var/lib/kubelet/pods/fd0a8aa2-1e57-46cc-8014-4f0cd3ae74bd/volumes" Dec 01 07:29:57 crc kubenswrapper[4632]: I1201 07:29:57.567931 4632 generic.go:334] "Generic (PLEG): container finished" podID="14a5d0f7-ae81-4944-8ea3-49c6b637ce94" containerID="41885641a3ae1dbf55f4cfe6d3d2abb89774a89881006d85a2de71df198fe9f3" exitCode=0 Dec 01 07:29:57 crc kubenswrapper[4632]: I1201 07:29:57.568025 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bth4h/crc-debug-kx2j8" event={"ID":"14a5d0f7-ae81-4944-8ea3-49c6b637ce94","Type":"ContainerDied","Data":"41885641a3ae1dbf55f4cfe6d3d2abb89774a89881006d85a2de71df198fe9f3"} Dec 01 07:29:57 crc kubenswrapper[4632]: I1201 07:29:57.603620 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-bth4h/crc-debug-kx2j8"] Dec 01 07:29:57 crc kubenswrapper[4632]: I1201 07:29:57.608135 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-bth4h/crc-debug-kx2j8"] Dec 01 07:29:58 crc kubenswrapper[4632]: I1201 07:29:58.647947 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-bth4h/crc-debug-kx2j8" Dec 01 07:29:58 crc kubenswrapper[4632]: I1201 07:29:58.765665 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/14a5d0f7-ae81-4944-8ea3-49c6b637ce94-host\") pod \"14a5d0f7-ae81-4944-8ea3-49c6b637ce94\" (UID: \"14a5d0f7-ae81-4944-8ea3-49c6b637ce94\") " Dec 01 07:29:58 crc kubenswrapper[4632]: I1201 07:29:58.765756 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/14a5d0f7-ae81-4944-8ea3-49c6b637ce94-host" (OuterVolumeSpecName: "host") pod "14a5d0f7-ae81-4944-8ea3-49c6b637ce94" (UID: "14a5d0f7-ae81-4944-8ea3-49c6b637ce94"). 
InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:29:58 crc kubenswrapper[4632]: I1201 07:29:58.766219 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cz9hm\" (UniqueName: \"kubernetes.io/projected/14a5d0f7-ae81-4944-8ea3-49c6b637ce94-kube-api-access-cz9hm\") pod \"14a5d0f7-ae81-4944-8ea3-49c6b637ce94\" (UID: \"14a5d0f7-ae81-4944-8ea3-49c6b637ce94\") " Dec 01 07:29:58 crc kubenswrapper[4632]: I1201 07:29:58.767014 4632 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/14a5d0f7-ae81-4944-8ea3-49c6b637ce94-host\") on node \"crc\" DevicePath \"\"" Dec 01 07:29:58 crc kubenswrapper[4632]: I1201 07:29:58.771993 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14a5d0f7-ae81-4944-8ea3-49c6b637ce94-kube-api-access-cz9hm" (OuterVolumeSpecName: "kube-api-access-cz9hm") pod "14a5d0f7-ae81-4944-8ea3-49c6b637ce94" (UID: "14a5d0f7-ae81-4944-8ea3-49c6b637ce94"). InnerVolumeSpecName "kube-api-access-cz9hm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:29:58 crc kubenswrapper[4632]: I1201 07:29:58.871562 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cz9hm\" (UniqueName: \"kubernetes.io/projected/14a5d0f7-ae81-4944-8ea3-49c6b637ce94-kube-api-access-cz9hm\") on node \"crc\" DevicePath \"\"" Dec 01 07:29:59 crc kubenswrapper[4632]: I1201 07:29:59.585665 4632 scope.go:117] "RemoveContainer" containerID="41885641a3ae1dbf55f4cfe6d3d2abb89774a89881006d85a2de71df198fe9f3" Dec 01 07:29:59 crc kubenswrapper[4632]: I1201 07:29:59.585768 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-bth4h/crc-debug-kx2j8" Dec 01 07:30:00 crc kubenswrapper[4632]: I1201 07:30:00.135876 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409570-plg2c"] Dec 01 07:30:00 crc kubenswrapper[4632]: E1201 07:30:00.136521 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14a5d0f7-ae81-4944-8ea3-49c6b637ce94" containerName="container-00" Dec 01 07:30:00 crc kubenswrapper[4632]: I1201 07:30:00.136534 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="14a5d0f7-ae81-4944-8ea3-49c6b637ce94" containerName="container-00" Dec 01 07:30:00 crc kubenswrapper[4632]: I1201 07:30:00.136729 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="14a5d0f7-ae81-4944-8ea3-49c6b637ce94" containerName="container-00" Dec 01 07:30:00 crc kubenswrapper[4632]: I1201 07:30:00.137332 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-plg2c" Dec 01 07:30:00 crc kubenswrapper[4632]: I1201 07:30:00.141300 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 01 07:30:00 crc kubenswrapper[4632]: I1201 07:30:00.141502 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 01 07:30:00 crc kubenswrapper[4632]: I1201 07:30:00.145347 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409570-plg2c"] Dec 01 07:30:00 crc kubenswrapper[4632]: I1201 07:30:00.304041 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2193cc60-8657-41f8-bbca-863a26ac235e-secret-volume\") pod \"collect-profiles-29409570-plg2c\" (UID: \"2193cc60-8657-41f8-bbca-863a26ac235e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-plg2c" Dec 01 07:30:00 crc kubenswrapper[4632]: I1201 07:30:00.304361 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2193cc60-8657-41f8-bbca-863a26ac235e-config-volume\") pod \"collect-profiles-29409570-plg2c\" (UID: \"2193cc60-8657-41f8-bbca-863a26ac235e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-plg2c" Dec 01 07:30:00 crc kubenswrapper[4632]: I1201 07:30:00.304394 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzsmx\" (UniqueName: \"kubernetes.io/projected/2193cc60-8657-41f8-bbca-863a26ac235e-kube-api-access-lzsmx\") pod \"collect-profiles-29409570-plg2c\" (UID: \"2193cc60-8657-41f8-bbca-863a26ac235e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-plg2c" Dec 01 07:30:00 crc kubenswrapper[4632]: I1201 07:30:00.406184 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2193cc60-8657-41f8-bbca-863a26ac235e-secret-volume\") pod \"collect-profiles-29409570-plg2c\" (UID: \"2193cc60-8657-41f8-bbca-863a26ac235e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-plg2c" Dec 01 07:30:00 crc kubenswrapper[4632]: I1201 07:30:00.406465 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2193cc60-8657-41f8-bbca-863a26ac235e-config-volume\") pod \"collect-profiles-29409570-plg2c\" (UID: \"2193cc60-8657-41f8-bbca-863a26ac235e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-plg2c" Dec 01 07:30:00 crc kubenswrapper[4632]: I1201 07:30:00.406493 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzsmx\" (UniqueName: \"kubernetes.io/projected/2193cc60-8657-41f8-bbca-863a26ac235e-kube-api-access-lzsmx\") pod \"collect-profiles-29409570-plg2c\" (UID: \"2193cc60-8657-41f8-bbca-863a26ac235e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-plg2c" Dec 01 07:30:00 crc kubenswrapper[4632]: I1201 07:30:00.407308 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2193cc60-8657-41f8-bbca-863a26ac235e-config-volume\") pod 
\"collect-profiles-29409570-plg2c\" (UID: \"2193cc60-8657-41f8-bbca-863a26ac235e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-plg2c" Dec 01 07:30:00 crc kubenswrapper[4632]: I1201 07:30:00.411737 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2193cc60-8657-41f8-bbca-863a26ac235e-secret-volume\") pod \"collect-profiles-29409570-plg2c\" (UID: \"2193cc60-8657-41f8-bbca-863a26ac235e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-plg2c" Dec 01 07:30:00 crc kubenswrapper[4632]: I1201 07:30:00.422116 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzsmx\" (UniqueName: \"kubernetes.io/projected/2193cc60-8657-41f8-bbca-863a26ac235e-kube-api-access-lzsmx\") pod \"collect-profiles-29409570-plg2c\" (UID: \"2193cc60-8657-41f8-bbca-863a26ac235e\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-plg2c" Dec 01 07:30:00 crc kubenswrapper[4632]: I1201 07:30:00.463907 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-plg2c" Dec 01 07:30:00 crc kubenswrapper[4632]: I1201 07:30:00.760906 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14a5d0f7-ae81-4944-8ea3-49c6b637ce94" path="/var/lib/kubelet/pods/14a5d0f7-ae81-4944-8ea3-49c6b637ce94/volumes" Dec 01 07:30:00 crc kubenswrapper[4632]: I1201 07:30:00.884241 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409570-plg2c"] Dec 01 07:30:01 crc kubenswrapper[4632]: I1201 07:30:01.613199 4632 generic.go:334] "Generic (PLEG): container finished" podID="2193cc60-8657-41f8-bbca-863a26ac235e" containerID="dffd08433fb4284d8ca8befbdd4b9031500658d37d3dc03590b192ea1c0ea237" exitCode=0 Dec 01 07:30:01 crc kubenswrapper[4632]: I1201 07:30:01.613265 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-plg2c" event={"ID":"2193cc60-8657-41f8-bbca-863a26ac235e","Type":"ContainerDied","Data":"dffd08433fb4284d8ca8befbdd4b9031500658d37d3dc03590b192ea1c0ea237"} Dec 01 07:30:01 crc kubenswrapper[4632]: I1201 07:30:01.613765 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-plg2c" event={"ID":"2193cc60-8657-41f8-bbca-863a26ac235e","Type":"ContainerStarted","Data":"a6be2319b6a3faf1145a32c3df8f2139171a96a504f51ef3bbd90b21f830b00e"} Dec 01 07:30:02 crc kubenswrapper[4632]: I1201 07:30:02.894940 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-plg2c" Dec 01 07:30:02 crc kubenswrapper[4632]: I1201 07:30:02.959201 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzsmx\" (UniqueName: \"kubernetes.io/projected/2193cc60-8657-41f8-bbca-863a26ac235e-kube-api-access-lzsmx\") pod \"2193cc60-8657-41f8-bbca-863a26ac235e\" (UID: \"2193cc60-8657-41f8-bbca-863a26ac235e\") " Dec 01 07:30:02 crc kubenswrapper[4632]: I1201 07:30:02.959265 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2193cc60-8657-41f8-bbca-863a26ac235e-secret-volume\") pod \"2193cc60-8657-41f8-bbca-863a26ac235e\" (UID: \"2193cc60-8657-41f8-bbca-863a26ac235e\") " Dec 01 07:30:02 crc kubenswrapper[4632]: I1201 07:30:02.959320 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2193cc60-8657-41f8-bbca-863a26ac235e-config-volume\") pod \"2193cc60-8657-41f8-bbca-863a26ac235e\" (UID: \"2193cc60-8657-41f8-bbca-863a26ac235e\") " Dec 01 07:30:02 crc kubenswrapper[4632]: I1201 07:30:02.959912 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2193cc60-8657-41f8-bbca-863a26ac235e-config-volume" (OuterVolumeSpecName: "config-volume") pod "2193cc60-8657-41f8-bbca-863a26ac235e" (UID: "2193cc60-8657-41f8-bbca-863a26ac235e"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 01 07:30:02 crc kubenswrapper[4632]: I1201 07:30:02.966093 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2193cc60-8657-41f8-bbca-863a26ac235e-kube-api-access-lzsmx" (OuterVolumeSpecName: "kube-api-access-lzsmx") pod "2193cc60-8657-41f8-bbca-863a26ac235e" (UID: "2193cc60-8657-41f8-bbca-863a26ac235e"). InnerVolumeSpecName "kube-api-access-lzsmx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:30:02 crc kubenswrapper[4632]: I1201 07:30:02.969506 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2193cc60-8657-41f8-bbca-863a26ac235e-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "2193cc60-8657-41f8-bbca-863a26ac235e" (UID: "2193cc60-8657-41f8-bbca-863a26ac235e"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 01 07:30:03 crc kubenswrapper[4632]: I1201 07:30:03.061401 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzsmx\" (UniqueName: \"kubernetes.io/projected/2193cc60-8657-41f8-bbca-863a26ac235e-kube-api-access-lzsmx\") on node \"crc\" DevicePath \"\"" Dec 01 07:30:03 crc kubenswrapper[4632]: I1201 07:30:03.061432 4632 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/2193cc60-8657-41f8-bbca-863a26ac235e-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 01 07:30:03 crc kubenswrapper[4632]: I1201 07:30:03.061442 4632 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/2193cc60-8657-41f8-bbca-863a26ac235e-config-volume\") on node \"crc\" DevicePath \"\"" Dec 01 07:30:03 crc kubenswrapper[4632]: I1201 07:30:03.630112 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-plg2c" event={"ID":"2193cc60-8657-41f8-bbca-863a26ac235e","Type":"ContainerDied","Data":"a6be2319b6a3faf1145a32c3df8f2139171a96a504f51ef3bbd90b21f830b00e"} Dec 01 07:30:03 crc kubenswrapper[4632]: I1201 07:30:03.630464 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a6be2319b6a3faf1145a32c3df8f2139171a96a504f51ef3bbd90b21f830b00e" Dec 01 07:30:03 crc kubenswrapper[4632]: I1201 07:30:03.630315 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29409570-plg2c" Dec 01 07:30:03 crc kubenswrapper[4632]: I1201 07:30:03.969166 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409525-ndjg6"] Dec 01 07:30:03 crc kubenswrapper[4632]: I1201 07:30:03.975796 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29409525-ndjg6"] Dec 01 07:30:04 crc kubenswrapper[4632]: I1201 07:30:04.760242 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dab5433a-456e-4006-a05d-a2f04ebe1330" path="/var/lib/kubelet/pods/dab5433a-456e-4006-a05d-a2f04ebe1330/volumes" Dec 01 07:30:10 crc kubenswrapper[4632]: I1201 07:30:10.645182 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5cb7c5c954-lttm2_d67e2874-71ae-4106-80fd-5361439b8ea5/barbican-api/0.log" Dec 01 07:30:10 crc kubenswrapper[4632]: I1201 07:30:10.787155 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5cb7c5c954-lttm2_d67e2874-71ae-4106-80fd-5361439b8ea5/barbican-api-log/0.log" Dec 01 07:30:10 crc kubenswrapper[4632]: I1201 07:30:10.816801 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-8lbcn"] Dec 01 07:30:10 crc kubenswrapper[4632]: E1201 07:30:10.822179 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2193cc60-8657-41f8-bbca-863a26ac235e" containerName="collect-profiles" Dec 01 07:30:10 crc kubenswrapper[4632]: I1201 07:30:10.822223 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="2193cc60-8657-41f8-bbca-863a26ac235e" containerName="collect-profiles" Dec 01 07:30:10 crc kubenswrapper[4632]: I1201 07:30:10.822525 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="2193cc60-8657-41f8-bbca-863a26ac235e" containerName="collect-profiles" Dec 01 07:30:10 crc kubenswrapper[4632]: I1201 
07:30:10.823961 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8lbcn" Dec 01 07:30:10 crc kubenswrapper[4632]: I1201 07:30:10.826713 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8lbcn"] Dec 01 07:30:10 crc kubenswrapper[4632]: I1201 07:30:10.867912 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-58b4798-zm5jf_16fb6740-33c0-4a6d-8711-34f7520087a5/barbican-keystone-listener/0.log" Dec 01 07:30:10 crc kubenswrapper[4632]: I1201 07:30:10.869703 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-58b4798-zm5jf_16fb6740-33c0-4a6d-8711-34f7520087a5/barbican-keystone-listener-log/0.log" Dec 01 07:30:10 crc kubenswrapper[4632]: I1201 07:30:10.925324 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ff00ff7-8740-4be3-b7ef-08c2bd13f278-utilities\") pod \"redhat-operators-8lbcn\" (UID: \"8ff00ff7-8740-4be3-b7ef-08c2bd13f278\") " pod="openshift-marketplace/redhat-operators-8lbcn" Dec 01 07:30:10 crc kubenswrapper[4632]: I1201 07:30:10.925485 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ff00ff7-8740-4be3-b7ef-08c2bd13f278-catalog-content\") pod \"redhat-operators-8lbcn\" (UID: \"8ff00ff7-8740-4be3-b7ef-08c2bd13f278\") " pod="openshift-marketplace/redhat-operators-8lbcn" Dec 01 07:30:10 crc kubenswrapper[4632]: I1201 07:30:10.925731 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9gcc\" (UniqueName: \"kubernetes.io/projected/8ff00ff7-8740-4be3-b7ef-08c2bd13f278-kube-api-access-d9gcc\") pod \"redhat-operators-8lbcn\" (UID: \"8ff00ff7-8740-4be3-b7ef-08c2bd13f278\") " pod="openshift-marketplace/redhat-operators-8lbcn" Dec 01 07:30:11 crc kubenswrapper[4632]: I1201 07:30:11.028088 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ff00ff7-8740-4be3-b7ef-08c2bd13f278-utilities\") pod \"redhat-operators-8lbcn\" (UID: \"8ff00ff7-8740-4be3-b7ef-08c2bd13f278\") " pod="openshift-marketplace/redhat-operators-8lbcn" Dec 01 07:30:11 crc kubenswrapper[4632]: I1201 07:30:11.028195 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ff00ff7-8740-4be3-b7ef-08c2bd13f278-catalog-content\") pod \"redhat-operators-8lbcn\" (UID: \"8ff00ff7-8740-4be3-b7ef-08c2bd13f278\") " pod="openshift-marketplace/redhat-operators-8lbcn" Dec 01 07:30:11 crc kubenswrapper[4632]: I1201 07:30:11.028280 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9gcc\" (UniqueName: \"kubernetes.io/projected/8ff00ff7-8740-4be3-b7ef-08c2bd13f278-kube-api-access-d9gcc\") pod \"redhat-operators-8lbcn\" (UID: \"8ff00ff7-8740-4be3-b7ef-08c2bd13f278\") " pod="openshift-marketplace/redhat-operators-8lbcn" Dec 01 07:30:11 crc kubenswrapper[4632]: I1201 07:30:11.028563 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ff00ff7-8740-4be3-b7ef-08c2bd13f278-utilities\") pod \"redhat-operators-8lbcn\" (UID: \"8ff00ff7-8740-4be3-b7ef-08c2bd13f278\") " 
pod="openshift-marketplace/redhat-operators-8lbcn" Dec 01 07:30:11 crc kubenswrapper[4632]: I1201 07:30:11.028687 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ff00ff7-8740-4be3-b7ef-08c2bd13f278-catalog-content\") pod \"redhat-operators-8lbcn\" (UID: \"8ff00ff7-8740-4be3-b7ef-08c2bd13f278\") " pod="openshift-marketplace/redhat-operators-8lbcn" Dec 01 07:30:11 crc kubenswrapper[4632]: I1201 07:30:11.044957 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9gcc\" (UniqueName: \"kubernetes.io/projected/8ff00ff7-8740-4be3-b7ef-08c2bd13f278-kube-api-access-d9gcc\") pod \"redhat-operators-8lbcn\" (UID: \"8ff00ff7-8740-4be3-b7ef-08c2bd13f278\") " pod="openshift-marketplace/redhat-operators-8lbcn" Dec 01 07:30:11 crc kubenswrapper[4632]: I1201 07:30:11.055942 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5cffc97f9c-677mp_aec57a49-c244-4fad-81c2-b29649e62945/barbican-worker/0.log" Dec 01 07:30:11 crc kubenswrapper[4632]: I1201 07:30:11.063875 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5cffc97f9c-677mp_aec57a49-c244-4fad-81c2-b29649e62945/barbican-worker-log/0.log" Dec 01 07:30:11 crc kubenswrapper[4632]: I1201 07:30:11.150028 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8lbcn" Dec 01 07:30:11 crc kubenswrapper[4632]: I1201 07:30:11.235883 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w_0fa69e48-53f9-4bb5-9e11-a9afde0d8912/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 07:30:11 crc kubenswrapper[4632]: I1201 07:30:11.303875 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9/ceilometer-central-agent/0.log" Dec 01 07:30:11 crc kubenswrapper[4632]: I1201 07:30:11.389048 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9/ceilometer-notification-agent/0.log" Dec 01 07:30:11 crc kubenswrapper[4632]: I1201 07:30:11.497057 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9/proxy-httpd/0.log" Dec 01 07:30:11 crc kubenswrapper[4632]: I1201 07:30:11.557934 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9/sg-core/0.log" Dec 01 07:30:11 crc kubenswrapper[4632]: I1201 07:30:11.579806 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3/cinder-api/0.log" Dec 01 07:30:11 crc kubenswrapper[4632]: I1201 07:30:11.641941 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8lbcn"] Dec 01 07:30:11 crc kubenswrapper[4632]: I1201 07:30:11.721945 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8lbcn" event={"ID":"8ff00ff7-8740-4be3-b7ef-08c2bd13f278","Type":"ContainerStarted","Data":"ea45927f991a8d4e9162673005179f25f983e490d161dc2274edd5256e307d15"} Dec 01 07:30:11 crc kubenswrapper[4632]: I1201 07:30:11.735054 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3/cinder-api-log/0.log" Dec 01 07:30:11 crc 
kubenswrapper[4632]: I1201 07:30:11.827289 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_08847455-e239-4d88-ba2d-0e17255fcaa3/cinder-scheduler/0.log" Dec 01 07:30:11 crc kubenswrapper[4632]: I1201 07:30:11.924280 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_08847455-e239-4d88-ba2d-0e17255fcaa3/probe/0.log" Dec 01 07:30:12 crc kubenswrapper[4632]: I1201 07:30:12.174257 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw_3cb46d14-bcaa-4c50-99c0-5d6693557f5d/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 07:30:12 crc kubenswrapper[4632]: I1201 07:30:12.220188 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-bp886_c30c10f4-9c67-4caf-8858-a2e74307ee33/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 07:30:12 crc kubenswrapper[4632]: I1201 07:30:12.523110 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6cd8587fff-2wtcz_940ceb01-c88a-4012-bd18-d87ef90d7549/init/0.log" Dec 01 07:30:12 crc kubenswrapper[4632]: I1201 07:30:12.573875 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6cd8587fff-2wtcz_940ceb01-c88a-4012-bd18-d87ef90d7549/init/0.log" Dec 01 07:30:12 crc kubenswrapper[4632]: I1201 07:30:12.631643 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6cd8587fff-2wtcz_940ceb01-c88a-4012-bd18-d87ef90d7549/dnsmasq-dns/0.log" Dec 01 07:30:12 crc kubenswrapper[4632]: I1201 07:30:12.702638 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-l89fl_6f79d230-5b05-468d-bf19-bb6a792c6b5d/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 07:30:12 crc kubenswrapper[4632]: I1201 07:30:12.730942 4632 generic.go:334] "Generic (PLEG): container finished" podID="8ff00ff7-8740-4be3-b7ef-08c2bd13f278" containerID="6795b2239a166e000f4705a7c30aefccb541f199ad7d3420fb5c4641a6131cc4" exitCode=0 Dec 01 07:30:12 crc kubenswrapper[4632]: I1201 07:30:12.730986 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8lbcn" event={"ID":"8ff00ff7-8740-4be3-b7ef-08c2bd13f278","Type":"ContainerDied","Data":"6795b2239a166e000f4705a7c30aefccb541f199ad7d3420fb5c4641a6131cc4"} Dec 01 07:30:12 crc kubenswrapper[4632]: I1201 07:30:12.829720 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_e8ac0c6b-4bf6-4259-bad6-9c0620047334/glance-httpd/0.log" Dec 01 07:30:12 crc kubenswrapper[4632]: I1201 07:30:12.885869 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_e8ac0c6b-4bf6-4259-bad6-9c0620047334/glance-log/0.log" Dec 01 07:30:12 crc kubenswrapper[4632]: I1201 07:30:12.974104 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_3524161d-d124-4b04-9b25-73e2e3188c7f/glance-httpd/0.log" Dec 01 07:30:12 crc kubenswrapper[4632]: I1201 07:30:12.987811 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_3524161d-d124-4b04-9b25-73e2e3188c7f/glance-log/0.log" Dec 01 07:30:13 crc kubenswrapper[4632]: I1201 07:30:13.073944 4632 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-44xkb_252ea9f1-a749-4524-9a53-dffbad624ea7/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 07:30:13 crc kubenswrapper[4632]: I1201 07:30:13.179748 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-9frjq_2421bc45-8d08-4634-861d-e3f185b01e54/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 07:30:13 crc kubenswrapper[4632]: I1201 07:30:13.376107 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-5d8c896fc4-b4shb_ebd84a7e-560e-4bc0-b3e7-2f2c0843d789/keystone-api/0.log" Dec 01 07:30:13 crc kubenswrapper[4632]: I1201 07:30:13.383112 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29409541-h825b_9b87ff38-8ea4-4e1e-8553-aa3069f0223a/keystone-cron/0.log" Dec 01 07:30:13 crc kubenswrapper[4632]: I1201 07:30:13.542644 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_7ada4ec3-07bb-43f8-9d48-30d2075314d0/kube-state-metrics/0.log" Dec 01 07:30:13 crc kubenswrapper[4632]: I1201 07:30:13.646991 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq_a2462a74-2ab0-47cc-9bed-77ce67b0a6c5/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 07:30:13 crc kubenswrapper[4632]: I1201 07:30:13.739316 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8lbcn" event={"ID":"8ff00ff7-8740-4be3-b7ef-08c2bd13f278","Type":"ContainerStarted","Data":"324d3a6ee734e5a132555437a52fb248c5f7388acb9713c912938c9140f4c728"} Dec 01 07:30:13 crc kubenswrapper[4632]: I1201 07:30:13.910521 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-594d88dfbf-66tbw_dd0f52ca-bba5-4410-9473-ac86c9839cf6/neutron-httpd/0.log" Dec 01 07:30:13 crc kubenswrapper[4632]: I1201 07:30:13.948761 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-594d88dfbf-66tbw_dd0f52ca-bba5-4410-9473-ac86c9839cf6/neutron-api/0.log" Dec 01 07:30:14 crc kubenswrapper[4632]: I1201 07:30:14.056191 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd_1ca8649a-8e09-4edd-9f1f-72277996e08d/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 07:30:14 crc kubenswrapper[4632]: I1201 07:30:14.483059 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_9565ad41-8a6b-461d-8299-e8fe256d30eb/nova-api-log/0.log" Dec 01 07:30:14 crc kubenswrapper[4632]: I1201 07:30:14.553032 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_a3304516-deb9-4715-a501-c0b1dbb89945/nova-cell0-conductor-conductor/0.log" Dec 01 07:30:14 crc kubenswrapper[4632]: I1201 07:30:14.718943 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_9565ad41-8a6b-461d-8299-e8fe256d30eb/nova-api-api/0.log" Dec 01 07:30:14 crc kubenswrapper[4632]: I1201 07:30:14.752667 4632 generic.go:334] "Generic (PLEG): container finished" podID="8ff00ff7-8740-4be3-b7ef-08c2bd13f278" containerID="324d3a6ee734e5a132555437a52fb248c5f7388acb9713c912938c9140f4c728" exitCode=0 Dec 01 07:30:14 crc kubenswrapper[4632]: I1201 07:30:14.763777 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8lbcn" 
event={"ID":"8ff00ff7-8740-4be3-b7ef-08c2bd13f278","Type":"ContainerDied","Data":"324d3a6ee734e5a132555437a52fb248c5f7388acb9713c912938c9140f4c728"} Dec 01 07:30:14 crc kubenswrapper[4632]: I1201 07:30:14.790491 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_da84cfe8-1321-40a1-a05b-14194e1e7d48/nova-cell1-conductor-conductor/0.log" Dec 01 07:30:14 crc kubenswrapper[4632]: I1201 07:30:14.918857 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc/nova-cell1-novncproxy-novncproxy/0.log" Dec 01 07:30:14 crc kubenswrapper[4632]: I1201 07:30:14.986926 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-t9nbd_a4cb69cd-b4b1-4f58-9553-27564432b39c/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 07:30:15 crc kubenswrapper[4632]: I1201 07:30:15.222081 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_8beb9f92-1512-4843-a060-e7407372d147/nova-metadata-log/0.log" Dec 01 07:30:15 crc kubenswrapper[4632]: I1201 07:30:15.436046 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_ec63fa13-7856-421c-ab7f-7281a42a6d67/nova-scheduler-scheduler/0.log" Dec 01 07:30:15 crc kubenswrapper[4632]: I1201 07:30:15.499283 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_d73ee8ba-1384-40b9-bbe8-62425cd044db/mysql-bootstrap/0.log" Dec 01 07:30:15 crc kubenswrapper[4632]: I1201 07:30:15.666965 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_d73ee8ba-1384-40b9-bbe8-62425cd044db/mysql-bootstrap/0.log" Dec 01 07:30:15 crc kubenswrapper[4632]: I1201 07:30:15.679690 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_d73ee8ba-1384-40b9-bbe8-62425cd044db/galera/0.log" Dec 01 07:30:15 crc kubenswrapper[4632]: I1201 07:30:15.765877 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8lbcn" event={"ID":"8ff00ff7-8740-4be3-b7ef-08c2bd13f278","Type":"ContainerStarted","Data":"d8b6f8c446aad2f7f07ee6c18f6dd03d2fe17d04475d893cc2f2f478a7ecc4f9"} Dec 01 07:30:15 crc kubenswrapper[4632]: I1201 07:30:15.792082 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-8lbcn" podStartSLOduration=3.14412437 podStartE2EDuration="5.79195756s" podCreationTimestamp="2025-12-01 07:30:10 +0000 UTC" firstStartedPulling="2025-12-01 07:30:12.732992313 +0000 UTC m=+2822.298005286" lastFinishedPulling="2025-12-01 07:30:15.380825503 +0000 UTC m=+2824.945838476" observedRunningTime="2025-12-01 07:30:15.781499112 +0000 UTC m=+2825.346512085" watchObservedRunningTime="2025-12-01 07:30:15.79195756 +0000 UTC m=+2825.356970533" Dec 01 07:30:15 crc kubenswrapper[4632]: I1201 07:30:15.875672 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_ecd36c5b-96fc-49af-b8f2-634fcf854cfa/mysql-bootstrap/0.log" Dec 01 07:30:16 crc kubenswrapper[4632]: I1201 07:30:16.062446 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_8beb9f92-1512-4843-a060-e7407372d147/nova-metadata-metadata/0.log" Dec 01 07:30:16 crc kubenswrapper[4632]: I1201 07:30:16.118097 4632 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_openstack-galera-0_ecd36c5b-96fc-49af-b8f2-634fcf854cfa/galera/0.log" Dec 01 07:30:16 crc kubenswrapper[4632]: I1201 07:30:16.205291 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_ecd36c5b-96fc-49af-b8f2-634fcf854cfa/mysql-bootstrap/0.log" Dec 01 07:30:16 crc kubenswrapper[4632]: I1201 07:30:16.247097 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_c29a6239-304a-4a40-8e32-35dfb513bb8f/openstackclient/0.log" Dec 01 07:30:16 crc kubenswrapper[4632]: I1201 07:30:16.425678 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-qkjhz_e2d9748c-3d24-43dd-a125-3a20cfe296e2/openstack-network-exporter/0.log" Dec 01 07:30:16 crc kubenswrapper[4632]: I1201 07:30:16.456580 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-hw5x4_fe332539-435d-44e0-bcf5-c47332ed1e55/ovn-controller/0.log" Dec 01 07:30:16 crc kubenswrapper[4632]: I1201 07:30:16.691551 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-rx6vw_e22d1e7d-958f-4e02-911b-31f513dd9802/ovsdb-server-init/0.log" Dec 01 07:30:16 crc kubenswrapper[4632]: I1201 07:30:16.876918 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-rx6vw_e22d1e7d-958f-4e02-911b-31f513dd9802/ovsdb-server/0.log" Dec 01 07:30:16 crc kubenswrapper[4632]: I1201 07:30:16.913034 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-rx6vw_e22d1e7d-958f-4e02-911b-31f513dd9802/ovsdb-server-init/0.log" Dec 01 07:30:16 crc kubenswrapper[4632]: I1201 07:30:16.933911 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-rx6vw_e22d1e7d-958f-4e02-911b-31f513dd9802/ovs-vswitchd/0.log" Dec 01 07:30:17 crc kubenswrapper[4632]: I1201 07:30:17.161235 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_0182dc49-3707-4d2e-a867-5eb37db588f8/openstack-network-exporter/0.log" Dec 01 07:30:17 crc kubenswrapper[4632]: I1201 07:30:17.199844 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_0182dc49-3707-4d2e-a867-5eb37db588f8/ovn-northd/0.log" Dec 01 07:30:17 crc kubenswrapper[4632]: I1201 07:30:17.230839 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-r4jjj_a1e089cc-f5f6-476a-af14-d25cd1150efd/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 07:30:17 crc kubenswrapper[4632]: I1201 07:30:17.411032 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_f0bb9103-afbb-45ea-9427-c4925dd007c9/ovsdbserver-nb/0.log" Dec 01 07:30:17 crc kubenswrapper[4632]: I1201 07:30:17.450224 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_f0bb9103-afbb-45ea-9427-c4925dd007c9/openstack-network-exporter/0.log" Dec 01 07:30:17 crc kubenswrapper[4632]: I1201 07:30:17.617480 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_c5f52c6d-0a10-43a9-84f8-940c156f3278/openstack-network-exporter/0.log" Dec 01 07:30:17 crc kubenswrapper[4632]: I1201 07:30:17.647938 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_c5f52c6d-0a10-43a9-84f8-940c156f3278/ovsdbserver-sb/0.log" Dec 01 07:30:17 crc kubenswrapper[4632]: I1201 07:30:17.763860 4632 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_placement-8445898876-gptmm_0f2d46d4-637f-441b-8710-f1d82d8a0c11/placement-api/0.log" Dec 01 07:30:17 crc kubenswrapper[4632]: I1201 07:30:17.901283 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-8445898876-gptmm_0f2d46d4-637f-441b-8710-f1d82d8a0c11/placement-log/0.log" Dec 01 07:30:17 crc kubenswrapper[4632]: I1201 07:30:17.948706 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_6fa564e2-c2c4-41f0-aa84-1431574a0a4b/setup-container/0.log" Dec 01 07:30:18 crc kubenswrapper[4632]: I1201 07:30:18.135582 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_6fa564e2-c2c4-41f0-aa84-1431574a0a4b/setup-container/0.log" Dec 01 07:30:18 crc kubenswrapper[4632]: I1201 07:30:18.197487 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_02ca2059-fc5b-4d54-886b-e6de4f303d3b/setup-container/0.log" Dec 01 07:30:18 crc kubenswrapper[4632]: I1201 07:30:18.222181 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_6fa564e2-c2c4-41f0-aa84-1431574a0a4b/rabbitmq/0.log" Dec 01 07:30:18 crc kubenswrapper[4632]: I1201 07:30:18.433096 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_02ca2059-fc5b-4d54-886b-e6de4f303d3b/rabbitmq/0.log" Dec 01 07:30:18 crc kubenswrapper[4632]: I1201 07:30:18.443312 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_02ca2059-fc5b-4d54-886b-e6de4f303d3b/setup-container/0.log" Dec 01 07:30:18 crc kubenswrapper[4632]: I1201 07:30:18.543648 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx_c9a462bc-acd6-4d48-b78b-3584fdb57851/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 07:30:18 crc kubenswrapper[4632]: I1201 07:30:18.643917 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-fx2pb_29eb2b86-6594-44a0-a146-073da23a9341/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 07:30:18 crc kubenswrapper[4632]: I1201 07:30:18.782582 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd_c57a7c91-453f-4fce-8410-abffb3ffe651/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 07:30:18 crc kubenswrapper[4632]: I1201 07:30:18.906046 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-n5wcr_3debc8c5-6f78-44c9-9f2d-4207eeec3b11/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 07:30:18 crc kubenswrapper[4632]: I1201 07:30:18.968401 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-bqvjq_6e818cb4-7d89-4c61-8bd4-8b15b748ed38/ssh-known-hosts-edpm-deployment/0.log" Dec 01 07:30:19 crc kubenswrapper[4632]: I1201 07:30:19.190054 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-7697b7f499-t6njt_e090ffa7-e9ce-46e9-97e8-8e38155d9241/proxy-server/0.log" Dec 01 07:30:19 crc kubenswrapper[4632]: I1201 07:30:19.227538 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-7697b7f499-t6njt_e090ffa7-e9ce-46e9-97e8-8e38155d9241/proxy-httpd/0.log" Dec 01 07:30:19 crc kubenswrapper[4632]: I1201 07:30:19.385836 4632 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-ring-rebalance-j5jrs_6efb3189-8101-4364-93b9-d31c87b9fe71/swift-ring-rebalance/0.log" Dec 01 07:30:19 crc kubenswrapper[4632]: I1201 07:30:19.426073 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/account-auditor/0.log" Dec 01 07:30:19 crc kubenswrapper[4632]: I1201 07:30:19.497441 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:30:19 crc kubenswrapper[4632]: I1201 07:30:19.497503 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:30:19 crc kubenswrapper[4632]: I1201 07:30:19.497772 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/account-reaper/0.log" Dec 01 07:30:19 crc kubenswrapper[4632]: I1201 07:30:19.610721 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/account-server/0.log" Dec 01 07:30:19 crc kubenswrapper[4632]: I1201 07:30:19.627486 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/account-replicator/0.log" Dec 01 07:30:19 crc kubenswrapper[4632]: I1201 07:30:19.697969 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/container-auditor/0.log" Dec 01 07:30:19 crc kubenswrapper[4632]: I1201 07:30:19.722271 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/container-replicator/0.log" Dec 01 07:30:19 crc kubenswrapper[4632]: I1201 07:30:19.853837 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/container-server/0.log" Dec 01 07:30:19 crc kubenswrapper[4632]: I1201 07:30:19.887499 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/container-updater/0.log" Dec 01 07:30:19 crc kubenswrapper[4632]: I1201 07:30:19.899889 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/object-expirer/0.log" Dec 01 07:30:19 crc kubenswrapper[4632]: I1201 07:30:19.944729 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/object-auditor/0.log" Dec 01 07:30:20 crc kubenswrapper[4632]: I1201 07:30:20.049055 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/object-server/0.log" Dec 01 07:30:20 crc kubenswrapper[4632]: I1201 07:30:20.101114 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/object-replicator/0.log" Dec 01 07:30:20 crc kubenswrapper[4632]: I1201 07:30:20.118806 4632 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/object-updater/0.log" Dec 01 07:30:20 crc kubenswrapper[4632]: I1201 07:30:20.127263 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/rsync/0.log" Dec 01 07:30:20 crc kubenswrapper[4632]: I1201 07:30:20.351833 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-42lvs_eb82c6b3-a652-4d30-a8c9-63f6878557cc/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 07:30:20 crc kubenswrapper[4632]: I1201 07:30:20.368376 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/swift-recon-cron/0.log" Dec 01 07:30:20 crc kubenswrapper[4632]: I1201 07:30:20.560800 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_b7e83dda-e4f1-4a62-b293-730dcafe5d39/test-operator-logs-container/0.log" Dec 01 07:30:20 crc kubenswrapper[4632]: I1201 07:30:20.568109 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_713aa788-d673-4113-93f4-760c3d3714cc/tempest-tests-tempest-tests-runner/0.log" Dec 01 07:30:20 crc kubenswrapper[4632]: I1201 07:30:20.722622 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-znccl_af7575c1-47ca-4c63-bdca-1a42d23485ee/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 07:30:21 crc kubenswrapper[4632]: I1201 07:30:21.150132 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-8lbcn" Dec 01 07:30:21 crc kubenswrapper[4632]: I1201 07:30:21.150193 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-8lbcn" Dec 01 07:30:21 crc kubenswrapper[4632]: I1201 07:30:21.192131 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-8lbcn" Dec 01 07:30:21 crc kubenswrapper[4632]: I1201 07:30:21.319823 4632 scope.go:117] "RemoveContainer" containerID="5e3bdd0d2c7c8f76812b00d29b407885019148b76f8e15acd6bb408e86c3284d" Dec 01 07:30:21 crc kubenswrapper[4632]: I1201 07:30:21.863366 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-8lbcn" Dec 01 07:30:21 crc kubenswrapper[4632]: I1201 07:30:21.918140 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8lbcn"] Dec 01 07:30:23 crc kubenswrapper[4632]: I1201 07:30:23.832546 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-8lbcn" podUID="8ff00ff7-8740-4be3-b7ef-08c2bd13f278" containerName="registry-server" containerID="cri-o://d8b6f8c446aad2f7f07ee6c18f6dd03d2fe17d04475d893cc2f2f478a7ecc4f9" gracePeriod=2 Dec 01 07:30:24 crc kubenswrapper[4632]: I1201 07:30:24.233893 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8lbcn" Dec 01 07:30:24 crc kubenswrapper[4632]: I1201 07:30:24.286481 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ff00ff7-8740-4be3-b7ef-08c2bd13f278-utilities\") pod \"8ff00ff7-8740-4be3-b7ef-08c2bd13f278\" (UID: \"8ff00ff7-8740-4be3-b7ef-08c2bd13f278\") " Dec 01 07:30:24 crc kubenswrapper[4632]: I1201 07:30:24.286661 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d9gcc\" (UniqueName: \"kubernetes.io/projected/8ff00ff7-8740-4be3-b7ef-08c2bd13f278-kube-api-access-d9gcc\") pod \"8ff00ff7-8740-4be3-b7ef-08c2bd13f278\" (UID: \"8ff00ff7-8740-4be3-b7ef-08c2bd13f278\") " Dec 01 07:30:24 crc kubenswrapper[4632]: I1201 07:30:24.286796 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ff00ff7-8740-4be3-b7ef-08c2bd13f278-catalog-content\") pod \"8ff00ff7-8740-4be3-b7ef-08c2bd13f278\" (UID: \"8ff00ff7-8740-4be3-b7ef-08c2bd13f278\") " Dec 01 07:30:24 crc kubenswrapper[4632]: I1201 07:30:24.288009 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ff00ff7-8740-4be3-b7ef-08c2bd13f278-utilities" (OuterVolumeSpecName: "utilities") pod "8ff00ff7-8740-4be3-b7ef-08c2bd13f278" (UID: "8ff00ff7-8740-4be3-b7ef-08c2bd13f278"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:30:24 crc kubenswrapper[4632]: I1201 07:30:24.293494 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ff00ff7-8740-4be3-b7ef-08c2bd13f278-kube-api-access-d9gcc" (OuterVolumeSpecName: "kube-api-access-d9gcc") pod "8ff00ff7-8740-4be3-b7ef-08c2bd13f278" (UID: "8ff00ff7-8740-4be3-b7ef-08c2bd13f278"). InnerVolumeSpecName "kube-api-access-d9gcc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:30:24 crc kubenswrapper[4632]: I1201 07:30:24.382968 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ff00ff7-8740-4be3-b7ef-08c2bd13f278-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8ff00ff7-8740-4be3-b7ef-08c2bd13f278" (UID: "8ff00ff7-8740-4be3-b7ef-08c2bd13f278"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:30:24 crc kubenswrapper[4632]: I1201 07:30:24.388442 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d9gcc\" (UniqueName: \"kubernetes.io/projected/8ff00ff7-8740-4be3-b7ef-08c2bd13f278-kube-api-access-d9gcc\") on node \"crc\" DevicePath \"\"" Dec 01 07:30:24 crc kubenswrapper[4632]: I1201 07:30:24.388471 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ff00ff7-8740-4be3-b7ef-08c2bd13f278-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:30:24 crc kubenswrapper[4632]: I1201 07:30:24.388482 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ff00ff7-8740-4be3-b7ef-08c2bd13f278-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:30:24 crc kubenswrapper[4632]: I1201 07:30:24.845031 4632 generic.go:334] "Generic (PLEG): container finished" podID="8ff00ff7-8740-4be3-b7ef-08c2bd13f278" containerID="d8b6f8c446aad2f7f07ee6c18f6dd03d2fe17d04475d893cc2f2f478a7ecc4f9" exitCode=0 Dec 01 07:30:24 crc kubenswrapper[4632]: I1201 07:30:24.845131 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8lbcn" event={"ID":"8ff00ff7-8740-4be3-b7ef-08c2bd13f278","Type":"ContainerDied","Data":"d8b6f8c446aad2f7f07ee6c18f6dd03d2fe17d04475d893cc2f2f478a7ecc4f9"} Dec 01 07:30:24 crc kubenswrapper[4632]: I1201 07:30:24.845389 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8lbcn" event={"ID":"8ff00ff7-8740-4be3-b7ef-08c2bd13f278","Type":"ContainerDied","Data":"ea45927f991a8d4e9162673005179f25f983e490d161dc2274edd5256e307d15"} Dec 01 07:30:24 crc kubenswrapper[4632]: I1201 07:30:24.845416 4632 scope.go:117] "RemoveContainer" containerID="d8b6f8c446aad2f7f07ee6c18f6dd03d2fe17d04475d893cc2f2f478a7ecc4f9" Dec 01 07:30:24 crc kubenswrapper[4632]: I1201 07:30:24.845147 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8lbcn" Dec 01 07:30:24 crc kubenswrapper[4632]: I1201 07:30:24.872167 4632 scope.go:117] "RemoveContainer" containerID="324d3a6ee734e5a132555437a52fb248c5f7388acb9713c912938c9140f4c728" Dec 01 07:30:24 crc kubenswrapper[4632]: I1201 07:30:24.874653 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8lbcn"] Dec 01 07:30:24 crc kubenswrapper[4632]: I1201 07:30:24.886680 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-8lbcn"] Dec 01 07:30:24 crc kubenswrapper[4632]: I1201 07:30:24.904084 4632 scope.go:117] "RemoveContainer" containerID="6795b2239a166e000f4705a7c30aefccb541f199ad7d3420fb5c4641a6131cc4" Dec 01 07:30:24 crc kubenswrapper[4632]: I1201 07:30:24.935169 4632 scope.go:117] "RemoveContainer" containerID="d8b6f8c446aad2f7f07ee6c18f6dd03d2fe17d04475d893cc2f2f478a7ecc4f9" Dec 01 07:30:24 crc kubenswrapper[4632]: E1201 07:30:24.936142 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8b6f8c446aad2f7f07ee6c18f6dd03d2fe17d04475d893cc2f2f478a7ecc4f9\": container with ID starting with d8b6f8c446aad2f7f07ee6c18f6dd03d2fe17d04475d893cc2f2f478a7ecc4f9 not found: ID does not exist" containerID="d8b6f8c446aad2f7f07ee6c18f6dd03d2fe17d04475d893cc2f2f478a7ecc4f9" Dec 01 07:30:24 crc kubenswrapper[4632]: I1201 07:30:24.936198 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8b6f8c446aad2f7f07ee6c18f6dd03d2fe17d04475d893cc2f2f478a7ecc4f9"} err="failed to get container status \"d8b6f8c446aad2f7f07ee6c18f6dd03d2fe17d04475d893cc2f2f478a7ecc4f9\": rpc error: code = NotFound desc = could not find container \"d8b6f8c446aad2f7f07ee6c18f6dd03d2fe17d04475d893cc2f2f478a7ecc4f9\": container with ID starting with d8b6f8c446aad2f7f07ee6c18f6dd03d2fe17d04475d893cc2f2f478a7ecc4f9 not found: ID does not exist" Dec 01 07:30:24 crc kubenswrapper[4632]: I1201 07:30:24.936226 4632 scope.go:117] "RemoveContainer" containerID="324d3a6ee734e5a132555437a52fb248c5f7388acb9713c912938c9140f4c728" Dec 01 07:30:24 crc kubenswrapper[4632]: E1201 07:30:24.937886 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"324d3a6ee734e5a132555437a52fb248c5f7388acb9713c912938c9140f4c728\": container with ID starting with 324d3a6ee734e5a132555437a52fb248c5f7388acb9713c912938c9140f4c728 not found: ID does not exist" containerID="324d3a6ee734e5a132555437a52fb248c5f7388acb9713c912938c9140f4c728" Dec 01 07:30:24 crc kubenswrapper[4632]: I1201 07:30:24.937930 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"324d3a6ee734e5a132555437a52fb248c5f7388acb9713c912938c9140f4c728"} err="failed to get container status \"324d3a6ee734e5a132555437a52fb248c5f7388acb9713c912938c9140f4c728\": rpc error: code = NotFound desc = could not find container \"324d3a6ee734e5a132555437a52fb248c5f7388acb9713c912938c9140f4c728\": container with ID starting with 324d3a6ee734e5a132555437a52fb248c5f7388acb9713c912938c9140f4c728 not found: ID does not exist" Dec 01 07:30:24 crc kubenswrapper[4632]: I1201 07:30:24.937959 4632 scope.go:117] "RemoveContainer" containerID="6795b2239a166e000f4705a7c30aefccb541f199ad7d3420fb5c4641a6131cc4" Dec 01 07:30:24 crc kubenswrapper[4632]: E1201 07:30:24.938298 4632 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"6795b2239a166e000f4705a7c30aefccb541f199ad7d3420fb5c4641a6131cc4\": container with ID starting with 6795b2239a166e000f4705a7c30aefccb541f199ad7d3420fb5c4641a6131cc4 not found: ID does not exist" containerID="6795b2239a166e000f4705a7c30aefccb541f199ad7d3420fb5c4641a6131cc4" Dec 01 07:30:24 crc kubenswrapper[4632]: I1201 07:30:24.938324 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6795b2239a166e000f4705a7c30aefccb541f199ad7d3420fb5c4641a6131cc4"} err="failed to get container status \"6795b2239a166e000f4705a7c30aefccb541f199ad7d3420fb5c4641a6131cc4\": rpc error: code = NotFound desc = could not find container \"6795b2239a166e000f4705a7c30aefccb541f199ad7d3420fb5c4641a6131cc4\": container with ID starting with 6795b2239a166e000f4705a7c30aefccb541f199ad7d3420fb5c4641a6131cc4 not found: ID does not exist" Dec 01 07:30:26 crc kubenswrapper[4632]: I1201 07:30:26.760991 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ff00ff7-8740-4be3-b7ef-08c2bd13f278" path="/var/lib/kubelet/pods/8ff00ff7-8740-4be3-b7ef-08c2bd13f278/volumes" Dec 01 07:30:29 crc kubenswrapper[4632]: I1201 07:30:29.256904 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_df701889-0ecf-4452-8689-40cc4c4de347/memcached/0.log" Dec 01 07:30:42 crc kubenswrapper[4632]: I1201 07:30:42.396982 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn_92427059-853f-4a13-8994-5cdf75d48aec/util/0.log" Dec 01 07:30:42 crc kubenswrapper[4632]: I1201 07:30:42.515205 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn_92427059-853f-4a13-8994-5cdf75d48aec/util/0.log" Dec 01 07:30:42 crc kubenswrapper[4632]: I1201 07:30:42.546610 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn_92427059-853f-4a13-8994-5cdf75d48aec/pull/0.log" Dec 01 07:30:42 crc kubenswrapper[4632]: I1201 07:30:42.546813 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn_92427059-853f-4a13-8994-5cdf75d48aec/pull/0.log" Dec 01 07:30:42 crc kubenswrapper[4632]: I1201 07:30:42.713995 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn_92427059-853f-4a13-8994-5cdf75d48aec/util/0.log" Dec 01 07:30:42 crc kubenswrapper[4632]: I1201 07:30:42.719755 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn_92427059-853f-4a13-8994-5cdf75d48aec/pull/0.log" Dec 01 07:30:42 crc kubenswrapper[4632]: I1201 07:30:42.726891 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn_92427059-853f-4a13-8994-5cdf75d48aec/extract/0.log" Dec 01 07:30:42 crc kubenswrapper[4632]: I1201 07:30:42.880139 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-wpwjc_3ad25430-83fc-45b0-83b1-adbe4e729508/kube-rbac-proxy/0.log" Dec 01 07:30:42 crc kubenswrapper[4632]: I1201 07:30:42.931751 4632 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-wpwjc_3ad25430-83fc-45b0-83b1-adbe4e729508/manager/0.log" Dec 01 07:30:42 crc kubenswrapper[4632]: I1201 07:30:42.961449 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-4lrqg_fb574298-9e57-474c-9f80-faa7be6cded8/kube-rbac-proxy/0.log" Dec 01 07:30:43 crc kubenswrapper[4632]: I1201 07:30:43.078875 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-4lrqg_fb574298-9e57-474c-9f80-faa7be6cded8/manager/0.log" Dec 01 07:30:43 crc kubenswrapper[4632]: I1201 07:30:43.127699 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-fxrdf_c5aba6fe-c38f-45ec-8057-a19b2636fe68/kube-rbac-proxy/0.log" Dec 01 07:30:43 crc kubenswrapper[4632]: I1201 07:30:43.157137 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-fxrdf_c5aba6fe-c38f-45ec-8057-a19b2636fe68/manager/0.log" Dec 01 07:30:43 crc kubenswrapper[4632]: I1201 07:30:43.300912 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-668d9c48b9-k89zc_0bb7b633-65c0-4c4e-9fad-648fd779ff4a/kube-rbac-proxy/0.log" Dec 01 07:30:43 crc kubenswrapper[4632]: I1201 07:30:43.332779 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-668d9c48b9-k89zc_0bb7b633-65c0-4c4e-9fad-648fd779ff4a/manager/0.log" Dec 01 07:30:43 crc kubenswrapper[4632]: I1201 07:30:43.423618 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-xjrkn_749f148d-477b-4186-8c5a-ea9f86e4a64b/kube-rbac-proxy/0.log" Dec 01 07:30:43 crc kubenswrapper[4632]: I1201 07:30:43.489065 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-xjrkn_749f148d-477b-4186-8c5a-ea9f86e4a64b/manager/0.log" Dec 01 07:30:43 crc kubenswrapper[4632]: I1201 07:30:43.540121 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-knsc7_c470b4eb-c3ca-4117-89ec-5812e4cbcec1/kube-rbac-proxy/0.log" Dec 01 07:30:43 crc kubenswrapper[4632]: I1201 07:30:43.594080 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-knsc7_c470b4eb-c3ca-4117-89ec-5812e4cbcec1/manager/0.log" Dec 01 07:30:43 crc kubenswrapper[4632]: I1201 07:30:43.663892 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-bbrm4_731aedb5-2e95-4d08-9a4e-6c27e64d5ea7/kube-rbac-proxy/0.log" Dec 01 07:30:43 crc kubenswrapper[4632]: I1201 07:30:43.835303 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-bbrm4_731aedb5-2e95-4d08-9a4e-6c27e64d5ea7/manager/0.log" Dec 01 07:30:43 crc kubenswrapper[4632]: I1201 07:30:43.863385 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-hwjzz_ad78f229-4425-4bc0-9721-fcf6c2a067d7/kube-rbac-proxy/0.log" Dec 01 07:30:43 crc kubenswrapper[4632]: I1201 07:30:43.868959 
4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-hwjzz_ad78f229-4425-4bc0-9721-fcf6c2a067d7/manager/0.log" Dec 01 07:30:44 crc kubenswrapper[4632]: I1201 07:30:44.034562 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-546d4bdf48-7ct9v_f191cde7-969a-4111-86cf-855623533060/kube-rbac-proxy/0.log" Dec 01 07:30:44 crc kubenswrapper[4632]: I1201 07:30:44.120445 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-546d4bdf48-7ct9v_f191cde7-969a-4111-86cf-855623533060/manager/0.log" Dec 01 07:30:44 crc kubenswrapper[4632]: I1201 07:30:44.257669 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6546668bfd-fcwbl_768788a8-025e-4e79-a0ec-6bb23a14f72e/kube-rbac-proxy/0.log" Dec 01 07:30:44 crc kubenswrapper[4632]: I1201 07:30:44.258411 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6546668bfd-fcwbl_768788a8-025e-4e79-a0ec-6bb23a14f72e/manager/0.log" Dec 01 07:30:44 crc kubenswrapper[4632]: I1201 07:30:44.293153 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-r9gx9_35336d69-2a15-4513-970c-19e86cbb339f/kube-rbac-proxy/0.log" Dec 01 07:30:44 crc kubenswrapper[4632]: I1201 07:30:44.432162 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-r9gx9_35336d69-2a15-4513-970c-19e86cbb339f/manager/0.log" Dec 01 07:30:44 crc kubenswrapper[4632]: I1201 07:30:44.445213 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-wnk9x_254c38bb-3a55-426d-a497-69b3aa16c639/kube-rbac-proxy/0.log" Dec 01 07:30:44 crc kubenswrapper[4632]: I1201 07:30:44.477729 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-wnk9x_254c38bb-3a55-426d-a497-69b3aa16c639/manager/0.log" Dec 01 07:30:44 crc kubenswrapper[4632]: I1201 07:30:44.647820 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-pdsxw_c1fe7e47-1a47-4f9b-b0a3-cbb08a5cce73/kube-rbac-proxy/0.log" Dec 01 07:30:44 crc kubenswrapper[4632]: I1201 07:30:44.709652 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-pdsxw_c1fe7e47-1a47-4f9b-b0a3-cbb08a5cce73/manager/0.log" Dec 01 07:30:44 crc kubenswrapper[4632]: I1201 07:30:44.779501 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-g2j5h_0d6924f1-38a5-434e-99b6-9f9a06ae0894/kube-rbac-proxy/0.log" Dec 01 07:30:44 crc kubenswrapper[4632]: I1201 07:30:44.828445 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-g2j5h_0d6924f1-38a5-434e-99b6-9f9a06ae0894/manager/0.log" Dec 01 07:30:44 crc kubenswrapper[4632]: I1201 07:30:44.882593 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6698bcb446x48x8_05397964-6686-490a-ab73-ec535a262794/kube-rbac-proxy/0.log" Dec 01 07:30:44 crc 
kubenswrapper[4632]: I1201 07:30:44.940043 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6698bcb446x48x8_05397964-6686-490a-ab73-ec535a262794/manager/0.log" Dec 01 07:30:45 crc kubenswrapper[4632]: I1201 07:30:45.345739 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6ddddd9d6f-p8vpd_fc9f0281-8d41-469a-b0f1-2b9f20245a43/operator/0.log" Dec 01 07:30:45 crc kubenswrapper[4632]: I1201 07:30:45.411720 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-x4g48_33f69e4f-eb93-4113-80d0-b50fdc5a83f7/registry-server/0.log" Dec 01 07:30:45 crc kubenswrapper[4632]: I1201 07:30:45.590430 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-7v8l7_ecb68de8-b267-4c69-baf4-078e3feacf8e/kube-rbac-proxy/0.log" Dec 01 07:30:45 crc kubenswrapper[4632]: I1201 07:30:45.647991 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-7v8l7_ecb68de8-b267-4c69-baf4-078e3feacf8e/manager/0.log" Dec 01 07:30:45 crc kubenswrapper[4632]: I1201 07:30:45.762785 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-x2k78_a9952ac0-b2d4-4717-823b-5f9f0338fb5f/kube-rbac-proxy/0.log" Dec 01 07:30:45 crc kubenswrapper[4632]: I1201 07:30:45.820970 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-x2k78_a9952ac0-b2d4-4717-823b-5f9f0338fb5f/manager/0.log" Dec 01 07:30:45 crc kubenswrapper[4632]: I1201 07:30:45.914760 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-7jjwb_ce060aca-e2c3-4454-b126-719a572ece48/operator/0.log" Dec 01 07:30:46 crc kubenswrapper[4632]: I1201 07:30:46.052120 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-674sk_9744f748-86b6-417c-ab38-18cc3ad9b89a/kube-rbac-proxy/0.log" Dec 01 07:30:46 crc kubenswrapper[4632]: I1201 07:30:46.125569 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-674sk_9744f748-86b6-417c-ab38-18cc3ad9b89a/manager/0.log" Dec 01 07:30:46 crc kubenswrapper[4632]: I1201 07:30:46.163989 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-656fd97d56-dfqrf_0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9/manager/0.log" Dec 01 07:30:46 crc kubenswrapper[4632]: I1201 07:30:46.213931 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-x2252_697c8fad-c587-41ce-ae4a-158bb22b6394/kube-rbac-proxy/0.log" Dec 01 07:30:46 crc kubenswrapper[4632]: I1201 07:30:46.276172 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-x2252_697c8fad-c587-41ce-ae4a-158bb22b6394/manager/0.log" Dec 01 07:30:46 crc kubenswrapper[4632]: I1201 07:30:46.344493 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-4hqhb_5640bad0-ba52-4bc4-845d-d47987318155/kube-rbac-proxy/0.log" Dec 01 
07:30:46 crc kubenswrapper[4632]: I1201 07:30:46.345200 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-4hqhb_5640bad0-ba52-4bc4-845d-d47987318155/manager/0.log" Dec 01 07:30:46 crc kubenswrapper[4632]: I1201 07:30:46.448096 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-zkfsl_756b1531-b2e5-4a10-aad8-ae2378b09a68/kube-rbac-proxy/0.log" Dec 01 07:30:46 crc kubenswrapper[4632]: I1201 07:30:46.503480 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-zkfsl_756b1531-b2e5-4a10-aad8-ae2378b09a68/manager/0.log" Dec 01 07:30:49 crc kubenswrapper[4632]: I1201 07:30:49.497764 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:30:49 crc kubenswrapper[4632]: I1201 07:30:49.498109 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:31:02 crc kubenswrapper[4632]: I1201 07:31:02.090089 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-mxcl4_e354731f-7ed1-47e3-8d64-7d55f1613100/control-plane-machine-set-operator/0.log" Dec 01 07:31:02 crc kubenswrapper[4632]: I1201 07:31:02.268788 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-g99wc_e90e3510-a941-487a-af96-639fdc977fbb/machine-api-operator/0.log" Dec 01 07:31:02 crc kubenswrapper[4632]: I1201 07:31:02.268949 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-g99wc_e90e3510-a941-487a-af96-639fdc977fbb/kube-rbac-proxy/0.log" Dec 01 07:31:13 crc kubenswrapper[4632]: I1201 07:31:13.192841 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-dntsb_15ee763c-142f-480b-92b5-6360ed211e21/cert-manager-controller/0.log" Dec 01 07:31:13 crc kubenswrapper[4632]: I1201 07:31:13.377629 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-jmgck_91c9fab4-eb3d-4b68-bf2b-31d5f17c6c0a/cert-manager-cainjector/0.log" Dec 01 07:31:13 crc kubenswrapper[4632]: I1201 07:31:13.389797 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-wjfb2_05af64fb-260a-44bb-a7e8-e3b8ffbee656/cert-manager-webhook/0.log" Dec 01 07:31:19 crc kubenswrapper[4632]: I1201 07:31:19.498407 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:31:19 crc kubenswrapper[4632]: I1201 07:31:19.498841 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" 
podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:31:19 crc kubenswrapper[4632]: I1201 07:31:19.498888 4632 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" Dec 01 07:31:19 crc kubenswrapper[4632]: I1201 07:31:19.499654 4632 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689"} pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 07:31:19 crc kubenswrapper[4632]: I1201 07:31:19.499709 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" containerID="cri-o://99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689" gracePeriod=600 Dec 01 07:31:19 crc kubenswrapper[4632]: E1201 07:31:19.615590 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:31:20 crc kubenswrapper[4632]: I1201 07:31:20.336997 4632 generic.go:334] "Generic (PLEG): container finished" podID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerID="99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689" exitCode=0 Dec 01 07:31:20 crc kubenswrapper[4632]: I1201 07:31:20.337049 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerDied","Data":"99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689"} Dec 01 07:31:20 crc kubenswrapper[4632]: I1201 07:31:20.337089 4632 scope.go:117] "RemoveContainer" containerID="9f6af426575a83107158be6c5915461d388f641893056af8705d97507d209c8b" Dec 01 07:31:20 crc kubenswrapper[4632]: I1201 07:31:20.337612 4632 scope.go:117] "RemoveContainer" containerID="99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689" Dec 01 07:31:20 crc kubenswrapper[4632]: E1201 07:31:20.337994 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:31:24 crc kubenswrapper[4632]: I1201 07:31:24.382216 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-m9dcd_6d66d298-9ddf-440c-ace2-14c38dc309b0/nmstate-console-plugin/0.log" Dec 01 07:31:24 crc kubenswrapper[4632]: I1201 07:31:24.493097 4632 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-handler-qjq94_f517f423-af69-4a25-a169-e71268fa0ca3/nmstate-handler/0.log" Dec 01 07:31:24 crc kubenswrapper[4632]: I1201 07:31:24.523020 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-ngl6v_3b03b7ed-47ab-4ae0-95e7-ed1c830fe065/kube-rbac-proxy/0.log" Dec 01 07:31:24 crc kubenswrapper[4632]: I1201 07:31:24.553969 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-ngl6v_3b03b7ed-47ab-4ae0-95e7-ed1c830fe065/nmstate-metrics/0.log" Dec 01 07:31:24 crc kubenswrapper[4632]: I1201 07:31:24.684132 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-w7h9m_7fcf49f3-19eb-41a7-b095-ceea8b76f9bd/nmstate-operator/0.log" Dec 01 07:31:24 crc kubenswrapper[4632]: I1201 07:31:24.758327 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-hmmp7_70be3201-d4ba-4c07-950e-527ad7d2024d/nmstate-webhook/0.log" Dec 01 07:31:31 crc kubenswrapper[4632]: I1201 07:31:31.750747 4632 scope.go:117] "RemoveContainer" containerID="99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689" Dec 01 07:31:31 crc kubenswrapper[4632]: E1201 07:31:31.751650 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:31:37 crc kubenswrapper[4632]: I1201 07:31:37.043640 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-knsfp_e2dcb1c6-c676-46e9-85b4-c0a3deb9e2fe/kube-rbac-proxy/0.log" Dec 01 07:31:37 crc kubenswrapper[4632]: I1201 07:31:37.098489 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-knsfp_e2dcb1c6-c676-46e9-85b4-c0a3deb9e2fe/controller/0.log" Dec 01 07:31:37 crc kubenswrapper[4632]: I1201 07:31:37.228878 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/cp-frr-files/0.log" Dec 01 07:31:37 crc kubenswrapper[4632]: I1201 07:31:37.357438 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/cp-frr-files/0.log" Dec 01 07:31:37 crc kubenswrapper[4632]: I1201 07:31:37.367845 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/cp-reloader/0.log" Dec 01 07:31:37 crc kubenswrapper[4632]: I1201 07:31:37.368686 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/cp-reloader/0.log" Dec 01 07:31:37 crc kubenswrapper[4632]: I1201 07:31:37.396037 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/cp-metrics/0.log" Dec 01 07:31:37 crc kubenswrapper[4632]: I1201 07:31:37.554475 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/cp-metrics/0.log" Dec 01 07:31:37 crc kubenswrapper[4632]: I1201 07:31:37.585427 4632 log.go:25] 
"Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/cp-reloader/0.log" Dec 01 07:31:37 crc kubenswrapper[4632]: I1201 07:31:37.585981 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/cp-frr-files/0.log" Dec 01 07:31:37 crc kubenswrapper[4632]: I1201 07:31:37.585992 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/cp-metrics/0.log" Dec 01 07:31:37 crc kubenswrapper[4632]: I1201 07:31:37.734554 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/cp-reloader/0.log" Dec 01 07:31:37 crc kubenswrapper[4632]: I1201 07:31:37.735776 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/cp-frr-files/0.log" Dec 01 07:31:37 crc kubenswrapper[4632]: I1201 07:31:37.735977 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/controller/0.log" Dec 01 07:31:37 crc kubenswrapper[4632]: I1201 07:31:37.762497 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/cp-metrics/0.log" Dec 01 07:31:37 crc kubenswrapper[4632]: I1201 07:31:37.925314 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/frr-metrics/0.log" Dec 01 07:31:37 crc kubenswrapper[4632]: I1201 07:31:37.927687 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/kube-rbac-proxy-frr/0.log" Dec 01 07:31:37 crc kubenswrapper[4632]: I1201 07:31:37.951193 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/kube-rbac-proxy/0.log" Dec 01 07:31:38 crc kubenswrapper[4632]: I1201 07:31:38.099267 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/reloader/0.log" Dec 01 07:31:38 crc kubenswrapper[4632]: I1201 07:31:38.118501 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-z9drr_1da19bfe-a759-44f5-9839-b638c45f84b8/frr-k8s-webhook-server/0.log" Dec 01 07:31:38 crc kubenswrapper[4632]: I1201 07:31:38.316764 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-5cdb7cf54d-h7wfl_4cceec72-bb6e-43a4-8b98-8077e45f281c/manager/0.log" Dec 01 07:31:38 crc kubenswrapper[4632]: I1201 07:31:38.492851 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-696b4c485-xvdlk_c28a59da-1614-46fc-9ece-a4c888e9c53c/webhook-server/0.log" Dec 01 07:31:38 crc kubenswrapper[4632]: I1201 07:31:38.600332 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-rld7v_1f8d778c-51f1-4fed-8f3a-34bd3f603d06/kube-rbac-proxy/0.log" Dec 01 07:31:38 crc kubenswrapper[4632]: I1201 07:31:38.951292 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/frr/0.log" Dec 01 07:31:39 crc kubenswrapper[4632]: I1201 07:31:39.146818 4632 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_speaker-rld7v_1f8d778c-51f1-4fed-8f3a-34bd3f603d06/speaker/0.log" Dec 01 07:31:45 crc kubenswrapper[4632]: I1201 07:31:45.750164 4632 scope.go:117] "RemoveContainer" containerID="99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689" Dec 01 07:31:45 crc kubenswrapper[4632]: E1201 07:31:45.750843 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:31:49 crc kubenswrapper[4632]: I1201 07:31:49.983842 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx_a2c9f938-68bc-4efd-8547-45136745c6c6/util/0.log" Dec 01 07:31:50 crc kubenswrapper[4632]: I1201 07:31:50.099226 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx_a2c9f938-68bc-4efd-8547-45136745c6c6/util/0.log" Dec 01 07:31:50 crc kubenswrapper[4632]: I1201 07:31:50.136229 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx_a2c9f938-68bc-4efd-8547-45136745c6c6/pull/0.log" Dec 01 07:31:50 crc kubenswrapper[4632]: I1201 07:31:50.151100 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx_a2c9f938-68bc-4efd-8547-45136745c6c6/pull/0.log" Dec 01 07:31:50 crc kubenswrapper[4632]: I1201 07:31:50.263808 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx_a2c9f938-68bc-4efd-8547-45136745c6c6/pull/0.log" Dec 01 07:31:50 crc kubenswrapper[4632]: I1201 07:31:50.281521 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx_a2c9f938-68bc-4efd-8547-45136745c6c6/extract/0.log" Dec 01 07:31:50 crc kubenswrapper[4632]: I1201 07:31:50.306015 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx_a2c9f938-68bc-4efd-8547-45136745c6c6/util/0.log" Dec 01 07:31:50 crc kubenswrapper[4632]: I1201 07:31:50.407260 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt_f5f12f82-cb0c-4db9-bb02-4da44d980004/util/0.log" Dec 01 07:31:50 crc kubenswrapper[4632]: I1201 07:31:50.569319 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt_f5f12f82-cb0c-4db9-bb02-4da44d980004/util/0.log" Dec 01 07:31:50 crc kubenswrapper[4632]: I1201 07:31:50.571395 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt_f5f12f82-cb0c-4db9-bb02-4da44d980004/pull/0.log" Dec 01 07:31:50 crc kubenswrapper[4632]: I1201 07:31:50.593328 4632 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt_f5f12f82-cb0c-4db9-bb02-4da44d980004/pull/0.log" Dec 01 07:31:50 crc kubenswrapper[4632]: I1201 07:31:50.704759 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt_f5f12f82-cb0c-4db9-bb02-4da44d980004/util/0.log" Dec 01 07:31:50 crc kubenswrapper[4632]: I1201 07:31:50.710291 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt_f5f12f82-cb0c-4db9-bb02-4da44d980004/extract/0.log" Dec 01 07:31:50 crc kubenswrapper[4632]: I1201 07:31:50.740163 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt_f5f12f82-cb0c-4db9-bb02-4da44d980004/pull/0.log" Dec 01 07:31:50 crc kubenswrapper[4632]: I1201 07:31:50.843301 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l4w7t_de997f32-b849-4dcd-96e7-de56cfa3ec3d/extract-utilities/0.log" Dec 01 07:31:50 crc kubenswrapper[4632]: I1201 07:31:50.968099 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l4w7t_de997f32-b849-4dcd-96e7-de56cfa3ec3d/extract-utilities/0.log" Dec 01 07:31:50 crc kubenswrapper[4632]: I1201 07:31:50.980296 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l4w7t_de997f32-b849-4dcd-96e7-de56cfa3ec3d/extract-content/0.log" Dec 01 07:31:50 crc kubenswrapper[4632]: I1201 07:31:50.984495 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l4w7t_de997f32-b849-4dcd-96e7-de56cfa3ec3d/extract-content/0.log" Dec 01 07:31:51 crc kubenswrapper[4632]: I1201 07:31:51.147940 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l4w7t_de997f32-b849-4dcd-96e7-de56cfa3ec3d/extract-content/0.log" Dec 01 07:31:51 crc kubenswrapper[4632]: I1201 07:31:51.151105 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l4w7t_de997f32-b849-4dcd-96e7-de56cfa3ec3d/extract-utilities/0.log" Dec 01 07:31:51 crc kubenswrapper[4632]: I1201 07:31:51.360090 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-mwjfp_2123ef98-d1db-4b71-8657-51be9c899a23/extract-utilities/0.log" Dec 01 07:31:51 crc kubenswrapper[4632]: I1201 07:31:51.477315 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l4w7t_de997f32-b849-4dcd-96e7-de56cfa3ec3d/registry-server/0.log" Dec 01 07:31:51 crc kubenswrapper[4632]: I1201 07:31:51.509760 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-mwjfp_2123ef98-d1db-4b71-8657-51be9c899a23/extract-utilities/0.log" Dec 01 07:31:51 crc kubenswrapper[4632]: I1201 07:31:51.543044 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-mwjfp_2123ef98-d1db-4b71-8657-51be9c899a23/extract-content/0.log" Dec 01 07:31:51 crc kubenswrapper[4632]: I1201 07:31:51.550483 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-mwjfp_2123ef98-d1db-4b71-8657-51be9c899a23/extract-content/0.log" Dec 01 07:31:51 
crc kubenswrapper[4632]: I1201 07:31:51.855702 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-mwjfp_2123ef98-d1db-4b71-8657-51be9c899a23/extract-content/0.log" Dec 01 07:31:51 crc kubenswrapper[4632]: I1201 07:31:51.876804 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-mwjfp_2123ef98-d1db-4b71-8657-51be9c899a23/extract-utilities/0.log" Dec 01 07:31:52 crc kubenswrapper[4632]: I1201 07:31:52.150847 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-mwjfp_2123ef98-d1db-4b71-8657-51be9c899a23/registry-server/0.log" Dec 01 07:31:52 crc kubenswrapper[4632]: I1201 07:31:52.157538 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-crm4g_5afb9da5-167e-47cf-80fe-e9365ec939fd/marketplace-operator/0.log" Dec 01 07:31:52 crc kubenswrapper[4632]: I1201 07:31:52.163456 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r52z6_1468035d-a3bd-4465-9fee-f27f6f7d4d7e/extract-utilities/0.log" Dec 01 07:31:52 crc kubenswrapper[4632]: I1201 07:31:52.286475 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r52z6_1468035d-a3bd-4465-9fee-f27f6f7d4d7e/extract-content/0.log" Dec 01 07:31:52 crc kubenswrapper[4632]: I1201 07:31:52.304661 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r52z6_1468035d-a3bd-4465-9fee-f27f6f7d4d7e/extract-content/0.log" Dec 01 07:31:52 crc kubenswrapper[4632]: I1201 07:31:52.331970 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r52z6_1468035d-a3bd-4465-9fee-f27f6f7d4d7e/extract-utilities/0.log" Dec 01 07:31:52 crc kubenswrapper[4632]: I1201 07:31:52.457510 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r52z6_1468035d-a3bd-4465-9fee-f27f6f7d4d7e/extract-content/0.log" Dec 01 07:31:52 crc kubenswrapper[4632]: I1201 07:31:52.471336 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r52z6_1468035d-a3bd-4465-9fee-f27f6f7d4d7e/extract-utilities/0.log" Dec 01 07:31:52 crc kubenswrapper[4632]: I1201 07:31:52.590265 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r52z6_1468035d-a3bd-4465-9fee-f27f6f7d4d7e/registry-server/0.log" Dec 01 07:31:52 crc kubenswrapper[4632]: I1201 07:31:52.656919 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-t667l_07f1738b-a57a-41f6-909f-0f830c165731/extract-utilities/0.log" Dec 01 07:31:52 crc kubenswrapper[4632]: I1201 07:31:52.801886 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-t667l_07f1738b-a57a-41f6-909f-0f830c165731/extract-content/0.log" Dec 01 07:31:52 crc kubenswrapper[4632]: I1201 07:31:52.806590 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-t667l_07f1738b-a57a-41f6-909f-0f830c165731/extract-utilities/0.log" Dec 01 07:31:52 crc kubenswrapper[4632]: I1201 07:31:52.815535 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-t667l_07f1738b-a57a-41f6-909f-0f830c165731/extract-content/0.log" Dec 01 07:31:53 crc 
kubenswrapper[4632]: I1201 07:31:53.009839 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-t667l_07f1738b-a57a-41f6-909f-0f830c165731/extract-utilities/0.log" Dec 01 07:31:53 crc kubenswrapper[4632]: I1201 07:31:53.022032 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-t667l_07f1738b-a57a-41f6-909f-0f830c165731/extract-content/0.log" Dec 01 07:31:53 crc kubenswrapper[4632]: I1201 07:31:53.362767 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-t667l_07f1738b-a57a-41f6-909f-0f830c165731/registry-server/0.log" Dec 01 07:32:00 crc kubenswrapper[4632]: I1201 07:32:00.757062 4632 scope.go:117] "RemoveContainer" containerID="99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689" Dec 01 07:32:00 crc kubenswrapper[4632]: E1201 07:32:00.760378 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:32:02 crc kubenswrapper[4632]: I1201 07:32:02.845857 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bd2vg"] Dec 01 07:32:02 crc kubenswrapper[4632]: E1201 07:32:02.846631 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ff00ff7-8740-4be3-b7ef-08c2bd13f278" containerName="extract-content" Dec 01 07:32:02 crc kubenswrapper[4632]: I1201 07:32:02.846648 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ff00ff7-8740-4be3-b7ef-08c2bd13f278" containerName="extract-content" Dec 01 07:32:02 crc kubenswrapper[4632]: E1201 07:32:02.846657 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ff00ff7-8740-4be3-b7ef-08c2bd13f278" containerName="registry-server" Dec 01 07:32:02 crc kubenswrapper[4632]: I1201 07:32:02.846666 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ff00ff7-8740-4be3-b7ef-08c2bd13f278" containerName="registry-server" Dec 01 07:32:02 crc kubenswrapper[4632]: E1201 07:32:02.846691 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ff00ff7-8740-4be3-b7ef-08c2bd13f278" containerName="extract-utilities" Dec 01 07:32:02 crc kubenswrapper[4632]: I1201 07:32:02.846697 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ff00ff7-8740-4be3-b7ef-08c2bd13f278" containerName="extract-utilities" Dec 01 07:32:02 crc kubenswrapper[4632]: I1201 07:32:02.846964 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ff00ff7-8740-4be3-b7ef-08c2bd13f278" containerName="registry-server" Dec 01 07:32:02 crc kubenswrapper[4632]: I1201 07:32:02.848415 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bd2vg" Dec 01 07:32:02 crc kubenswrapper[4632]: I1201 07:32:02.853383 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bd2vg"] Dec 01 07:32:02 crc kubenswrapper[4632]: I1201 07:32:02.957439 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07762bc1-f49e-4c35-8f4b-04da5c0c9adf-utilities\") pod \"community-operators-bd2vg\" (UID: \"07762bc1-f49e-4c35-8f4b-04da5c0c9adf\") " pod="openshift-marketplace/community-operators-bd2vg" Dec 01 07:32:02 crc kubenswrapper[4632]: I1201 07:32:02.957740 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07762bc1-f49e-4c35-8f4b-04da5c0c9adf-catalog-content\") pod \"community-operators-bd2vg\" (UID: \"07762bc1-f49e-4c35-8f4b-04da5c0c9adf\") " pod="openshift-marketplace/community-operators-bd2vg" Dec 01 07:32:02 crc kubenswrapper[4632]: I1201 07:32:02.957894 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jb5sm\" (UniqueName: \"kubernetes.io/projected/07762bc1-f49e-4c35-8f4b-04da5c0c9adf-kube-api-access-jb5sm\") pod \"community-operators-bd2vg\" (UID: \"07762bc1-f49e-4c35-8f4b-04da5c0c9adf\") " pod="openshift-marketplace/community-operators-bd2vg" Dec 01 07:32:03 crc kubenswrapper[4632]: I1201 07:32:03.059668 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07762bc1-f49e-4c35-8f4b-04da5c0c9adf-catalog-content\") pod \"community-operators-bd2vg\" (UID: \"07762bc1-f49e-4c35-8f4b-04da5c0c9adf\") " pod="openshift-marketplace/community-operators-bd2vg" Dec 01 07:32:03 crc kubenswrapper[4632]: I1201 07:32:03.059739 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jb5sm\" (UniqueName: \"kubernetes.io/projected/07762bc1-f49e-4c35-8f4b-04da5c0c9adf-kube-api-access-jb5sm\") pod \"community-operators-bd2vg\" (UID: \"07762bc1-f49e-4c35-8f4b-04da5c0c9adf\") " pod="openshift-marketplace/community-operators-bd2vg" Dec 01 07:32:03 crc kubenswrapper[4632]: I1201 07:32:03.059829 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07762bc1-f49e-4c35-8f4b-04da5c0c9adf-utilities\") pod \"community-operators-bd2vg\" (UID: \"07762bc1-f49e-4c35-8f4b-04da5c0c9adf\") " pod="openshift-marketplace/community-operators-bd2vg" Dec 01 07:32:03 crc kubenswrapper[4632]: I1201 07:32:03.060154 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07762bc1-f49e-4c35-8f4b-04da5c0c9adf-catalog-content\") pod \"community-operators-bd2vg\" (UID: \"07762bc1-f49e-4c35-8f4b-04da5c0c9adf\") " pod="openshift-marketplace/community-operators-bd2vg" Dec 01 07:32:03 crc kubenswrapper[4632]: I1201 07:32:03.060221 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07762bc1-f49e-4c35-8f4b-04da5c0c9adf-utilities\") pod \"community-operators-bd2vg\" (UID: \"07762bc1-f49e-4c35-8f4b-04da5c0c9adf\") " pod="openshift-marketplace/community-operators-bd2vg" Dec 01 07:32:03 crc kubenswrapper[4632]: I1201 07:32:03.083953 4632 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jb5sm\" (UniqueName: \"kubernetes.io/projected/07762bc1-f49e-4c35-8f4b-04da5c0c9adf-kube-api-access-jb5sm\") pod \"community-operators-bd2vg\" (UID: \"07762bc1-f49e-4c35-8f4b-04da5c0c9adf\") " pod="openshift-marketplace/community-operators-bd2vg" Dec 01 07:32:03 crc kubenswrapper[4632]: I1201 07:32:03.165918 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bd2vg" Dec 01 07:32:03 crc kubenswrapper[4632]: I1201 07:32:03.633992 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bd2vg"] Dec 01 07:32:03 crc kubenswrapper[4632]: I1201 07:32:03.707445 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bd2vg" event={"ID":"07762bc1-f49e-4c35-8f4b-04da5c0c9adf","Type":"ContainerStarted","Data":"334ee02bc2312367d0e0338f6463198e7e6188373093dad81d299a9ecc4eb4c5"} Dec 01 07:32:04 crc kubenswrapper[4632]: I1201 07:32:04.720924 4632 generic.go:334] "Generic (PLEG): container finished" podID="07762bc1-f49e-4c35-8f4b-04da5c0c9adf" containerID="fcf1d807289da95f5a1b1027bd5ac385eec07d128ff722926182e7722c955cd2" exitCode=0 Dec 01 07:32:04 crc kubenswrapper[4632]: I1201 07:32:04.721030 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bd2vg" event={"ID":"07762bc1-f49e-4c35-8f4b-04da5c0c9adf","Type":"ContainerDied","Data":"fcf1d807289da95f5a1b1027bd5ac385eec07d128ff722926182e7722c955cd2"} Dec 01 07:32:10 crc kubenswrapper[4632]: I1201 07:32:10.828707 4632 generic.go:334] "Generic (PLEG): container finished" podID="07762bc1-f49e-4c35-8f4b-04da5c0c9adf" containerID="27519577621e663afb69bbe1fc3c65fa4db445a669a53e2bf2ff0ae44a71949f" exitCode=0 Dec 01 07:32:10 crc kubenswrapper[4632]: I1201 07:32:10.828822 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bd2vg" event={"ID":"07762bc1-f49e-4c35-8f4b-04da5c0c9adf","Type":"ContainerDied","Data":"27519577621e663afb69bbe1fc3c65fa4db445a669a53e2bf2ff0ae44a71949f"} Dec 01 07:32:11 crc kubenswrapper[4632]: I1201 07:32:11.843159 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bd2vg" event={"ID":"07762bc1-f49e-4c35-8f4b-04da5c0c9adf","Type":"ContainerStarted","Data":"7c567493447ed7f953b037c465b7f53d20fe5223061ff41ccaa7b1bd832645f1"} Dec 01 07:32:11 crc kubenswrapper[4632]: I1201 07:32:11.874184 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bd2vg" podStartSLOduration=3.236053286 podStartE2EDuration="9.874165864s" podCreationTimestamp="2025-12-01 07:32:02 +0000 UTC" firstStartedPulling="2025-12-01 07:32:04.724941787 +0000 UTC m=+2934.289954761" lastFinishedPulling="2025-12-01 07:32:11.363054365 +0000 UTC m=+2940.928067339" observedRunningTime="2025-12-01 07:32:11.865587047 +0000 UTC m=+2941.430600020" watchObservedRunningTime="2025-12-01 07:32:11.874165864 +0000 UTC m=+2941.439178837" Dec 01 07:32:13 crc kubenswrapper[4632]: I1201 07:32:13.166374 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bd2vg" Dec 01 07:32:13 crc kubenswrapper[4632]: I1201 07:32:13.166743 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bd2vg" Dec 01 07:32:14 crc kubenswrapper[4632]: I1201 07:32:14.212564 4632 
prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-bd2vg" podUID="07762bc1-f49e-4c35-8f4b-04da5c0c9adf" containerName="registry-server" probeResult="failure" output=< Dec 01 07:32:14 crc kubenswrapper[4632]: timeout: failed to connect service ":50051" within 1s Dec 01 07:32:14 crc kubenswrapper[4632]: > Dec 01 07:32:14 crc kubenswrapper[4632]: I1201 07:32:14.751749 4632 scope.go:117] "RemoveContainer" containerID="99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689" Dec 01 07:32:14 crc kubenswrapper[4632]: E1201 07:32:14.752284 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:32:23 crc kubenswrapper[4632]: I1201 07:32:23.205340 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bd2vg" Dec 01 07:32:23 crc kubenswrapper[4632]: I1201 07:32:23.242244 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bd2vg" Dec 01 07:32:23 crc kubenswrapper[4632]: I1201 07:32:23.298615 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bd2vg"] Dec 01 07:32:23 crc kubenswrapper[4632]: I1201 07:32:23.443850 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mwjfp"] Dec 01 07:32:23 crc kubenswrapper[4632]: I1201 07:32:23.444302 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-mwjfp" podUID="2123ef98-d1db-4b71-8657-51be9c899a23" containerName="registry-server" containerID="cri-o://9eae05e50ec6982dc7ccbb42cf8f7adfcdc23dbea9edb9e86335d577656656e0" gracePeriod=2 Dec 01 07:32:23 crc kubenswrapper[4632]: I1201 07:32:23.879270 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mwjfp" Dec 01 07:32:23 crc kubenswrapper[4632]: I1201 07:32:23.958517 4632 generic.go:334] "Generic (PLEG): container finished" podID="2123ef98-d1db-4b71-8657-51be9c899a23" containerID="9eae05e50ec6982dc7ccbb42cf8f7adfcdc23dbea9edb9e86335d577656656e0" exitCode=0 Dec 01 07:32:23 crc kubenswrapper[4632]: I1201 07:32:23.959232 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwjfp" event={"ID":"2123ef98-d1db-4b71-8657-51be9c899a23","Type":"ContainerDied","Data":"9eae05e50ec6982dc7ccbb42cf8f7adfcdc23dbea9edb9e86335d577656656e0"} Dec 01 07:32:23 crc kubenswrapper[4632]: I1201 07:32:23.959297 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mwjfp" Dec 01 07:32:23 crc kubenswrapper[4632]: I1201 07:32:23.959306 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mwjfp" event={"ID":"2123ef98-d1db-4b71-8657-51be9c899a23","Type":"ContainerDied","Data":"60f831ef28817d9df3f7fc1666ac87b47c47529c9e6b0d108df0d37760579c09"} Dec 01 07:32:23 crc kubenswrapper[4632]: I1201 07:32:23.959333 4632 scope.go:117] "RemoveContainer" containerID="9eae05e50ec6982dc7ccbb42cf8f7adfcdc23dbea9edb9e86335d577656656e0" Dec 01 07:32:23 crc kubenswrapper[4632]: I1201 07:32:23.980959 4632 scope.go:117] "RemoveContainer" containerID="d249b3997970d9dcf722a21255a6153a456b0598e898011c379302943239fcca" Dec 01 07:32:24 crc kubenswrapper[4632]: I1201 07:32:24.002816 4632 scope.go:117] "RemoveContainer" containerID="cf0b4f8171e8648994797d705052906b517f57ca6553e4a87732ad9e5ffd6a37" Dec 01 07:32:24 crc kubenswrapper[4632]: I1201 07:32:24.015765 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2123ef98-d1db-4b71-8657-51be9c899a23-utilities\") pod \"2123ef98-d1db-4b71-8657-51be9c899a23\" (UID: \"2123ef98-d1db-4b71-8657-51be9c899a23\") " Dec 01 07:32:24 crc kubenswrapper[4632]: I1201 07:32:24.016011 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8pbbn\" (UniqueName: \"kubernetes.io/projected/2123ef98-d1db-4b71-8657-51be9c899a23-kube-api-access-8pbbn\") pod \"2123ef98-d1db-4b71-8657-51be9c899a23\" (UID: \"2123ef98-d1db-4b71-8657-51be9c899a23\") " Dec 01 07:32:24 crc kubenswrapper[4632]: I1201 07:32:24.016118 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2123ef98-d1db-4b71-8657-51be9c899a23-catalog-content\") pod \"2123ef98-d1db-4b71-8657-51be9c899a23\" (UID: \"2123ef98-d1db-4b71-8657-51be9c899a23\") " Dec 01 07:32:24 crc kubenswrapper[4632]: I1201 07:32:24.017427 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2123ef98-d1db-4b71-8657-51be9c899a23-utilities" (OuterVolumeSpecName: "utilities") pod "2123ef98-d1db-4b71-8657-51be9c899a23" (UID: "2123ef98-d1db-4b71-8657-51be9c899a23"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:32:24 crc kubenswrapper[4632]: I1201 07:32:24.022128 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2123ef98-d1db-4b71-8657-51be9c899a23-kube-api-access-8pbbn" (OuterVolumeSpecName: "kube-api-access-8pbbn") pod "2123ef98-d1db-4b71-8657-51be9c899a23" (UID: "2123ef98-d1db-4b71-8657-51be9c899a23"). InnerVolumeSpecName "kube-api-access-8pbbn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:32:24 crc kubenswrapper[4632]: I1201 07:32:24.065576 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2123ef98-d1db-4b71-8657-51be9c899a23-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2123ef98-d1db-4b71-8657-51be9c899a23" (UID: "2123ef98-d1db-4b71-8657-51be9c899a23"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:32:24 crc kubenswrapper[4632]: I1201 07:32:24.074857 4632 scope.go:117] "RemoveContainer" containerID="9eae05e50ec6982dc7ccbb42cf8f7adfcdc23dbea9edb9e86335d577656656e0" Dec 01 07:32:24 crc kubenswrapper[4632]: E1201 07:32:24.075291 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9eae05e50ec6982dc7ccbb42cf8f7adfcdc23dbea9edb9e86335d577656656e0\": container with ID starting with 9eae05e50ec6982dc7ccbb42cf8f7adfcdc23dbea9edb9e86335d577656656e0 not found: ID does not exist" containerID="9eae05e50ec6982dc7ccbb42cf8f7adfcdc23dbea9edb9e86335d577656656e0" Dec 01 07:32:24 crc kubenswrapper[4632]: I1201 07:32:24.075321 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9eae05e50ec6982dc7ccbb42cf8f7adfcdc23dbea9edb9e86335d577656656e0"} err="failed to get container status \"9eae05e50ec6982dc7ccbb42cf8f7adfcdc23dbea9edb9e86335d577656656e0\": rpc error: code = NotFound desc = could not find container \"9eae05e50ec6982dc7ccbb42cf8f7adfcdc23dbea9edb9e86335d577656656e0\": container with ID starting with 9eae05e50ec6982dc7ccbb42cf8f7adfcdc23dbea9edb9e86335d577656656e0 not found: ID does not exist" Dec 01 07:32:24 crc kubenswrapper[4632]: I1201 07:32:24.075341 4632 scope.go:117] "RemoveContainer" containerID="d249b3997970d9dcf722a21255a6153a456b0598e898011c379302943239fcca" Dec 01 07:32:24 crc kubenswrapper[4632]: E1201 07:32:24.075834 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d249b3997970d9dcf722a21255a6153a456b0598e898011c379302943239fcca\": container with ID starting with d249b3997970d9dcf722a21255a6153a456b0598e898011c379302943239fcca not found: ID does not exist" containerID="d249b3997970d9dcf722a21255a6153a456b0598e898011c379302943239fcca" Dec 01 07:32:24 crc kubenswrapper[4632]: I1201 07:32:24.075859 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d249b3997970d9dcf722a21255a6153a456b0598e898011c379302943239fcca"} err="failed to get container status \"d249b3997970d9dcf722a21255a6153a456b0598e898011c379302943239fcca\": rpc error: code = NotFound desc = could not find container \"d249b3997970d9dcf722a21255a6153a456b0598e898011c379302943239fcca\": container with ID starting with d249b3997970d9dcf722a21255a6153a456b0598e898011c379302943239fcca not found: ID does not exist" Dec 01 07:32:24 crc kubenswrapper[4632]: I1201 07:32:24.075872 4632 scope.go:117] "RemoveContainer" containerID="cf0b4f8171e8648994797d705052906b517f57ca6553e4a87732ad9e5ffd6a37" Dec 01 07:32:24 crc kubenswrapper[4632]: E1201 07:32:24.076177 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf0b4f8171e8648994797d705052906b517f57ca6553e4a87732ad9e5ffd6a37\": container with ID starting with cf0b4f8171e8648994797d705052906b517f57ca6553e4a87732ad9e5ffd6a37 not found: ID does not exist" containerID="cf0b4f8171e8648994797d705052906b517f57ca6553e4a87732ad9e5ffd6a37" Dec 01 07:32:24 crc kubenswrapper[4632]: I1201 07:32:24.076220 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf0b4f8171e8648994797d705052906b517f57ca6553e4a87732ad9e5ffd6a37"} err="failed to get container status \"cf0b4f8171e8648994797d705052906b517f57ca6553e4a87732ad9e5ffd6a37\": rpc error: code = NotFound desc = could not 
find container \"cf0b4f8171e8648994797d705052906b517f57ca6553e4a87732ad9e5ffd6a37\": container with ID starting with cf0b4f8171e8648994797d705052906b517f57ca6553e4a87732ad9e5ffd6a37 not found: ID does not exist" Dec 01 07:32:24 crc kubenswrapper[4632]: I1201 07:32:24.118182 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8pbbn\" (UniqueName: \"kubernetes.io/projected/2123ef98-d1db-4b71-8657-51be9c899a23-kube-api-access-8pbbn\") on node \"crc\" DevicePath \"\"" Dec 01 07:32:24 crc kubenswrapper[4632]: I1201 07:32:24.118209 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2123ef98-d1db-4b71-8657-51be9c899a23-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:32:24 crc kubenswrapper[4632]: I1201 07:32:24.118225 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2123ef98-d1db-4b71-8657-51be9c899a23-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:32:24 crc kubenswrapper[4632]: I1201 07:32:24.294216 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mwjfp"] Dec 01 07:32:24 crc kubenswrapper[4632]: I1201 07:32:24.300378 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-mwjfp"] Dec 01 07:32:24 crc kubenswrapper[4632]: I1201 07:32:24.764883 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2123ef98-d1db-4b71-8657-51be9c899a23" path="/var/lib/kubelet/pods/2123ef98-d1db-4b71-8657-51be9c899a23/volumes" Dec 01 07:32:25 crc kubenswrapper[4632]: I1201 07:32:25.750798 4632 scope.go:117] "RemoveContainer" containerID="99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689" Dec 01 07:32:25 crc kubenswrapper[4632]: E1201 07:32:25.751248 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:32:37 crc kubenswrapper[4632]: I1201 07:32:37.749931 4632 scope.go:117] "RemoveContainer" containerID="99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689" Dec 01 07:32:37 crc kubenswrapper[4632]: E1201 07:32:37.750947 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:32:52 crc kubenswrapper[4632]: I1201 07:32:52.751272 4632 scope.go:117] "RemoveContainer" containerID="99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689" Dec 01 07:32:52 crc kubenswrapper[4632]: E1201 07:32:52.752228 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:33:05 crc kubenswrapper[4632]: I1201 07:33:05.750863 4632 scope.go:117] "RemoveContainer" containerID="99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689" Dec 01 07:33:05 crc kubenswrapper[4632]: E1201 07:33:05.751766 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:33:11 crc kubenswrapper[4632]: I1201 07:33:11.412905 4632 generic.go:334] "Generic (PLEG): container finished" podID="51b93b9c-d69c-4fde-9d72-e906a394467e" containerID="b8beffa25ca955d870471bfd73af074d225240c91724cc6677e65c6a85d1b2eb" exitCode=0 Dec 01 07:33:11 crc kubenswrapper[4632]: I1201 07:33:11.413123 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-bth4h/must-gather-wtlhd" event={"ID":"51b93b9c-d69c-4fde-9d72-e906a394467e","Type":"ContainerDied","Data":"b8beffa25ca955d870471bfd73af074d225240c91724cc6677e65c6a85d1b2eb"} Dec 01 07:33:11 crc kubenswrapper[4632]: I1201 07:33:11.414066 4632 scope.go:117] "RemoveContainer" containerID="b8beffa25ca955d870471bfd73af074d225240c91724cc6677e65c6a85d1b2eb" Dec 01 07:33:11 crc kubenswrapper[4632]: I1201 07:33:11.506055 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-bth4h_must-gather-wtlhd_51b93b9c-d69c-4fde-9d72-e906a394467e/gather/0.log" Dec 01 07:33:17 crc kubenswrapper[4632]: I1201 07:33:17.750089 4632 scope.go:117] "RemoveContainer" containerID="99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689" Dec 01 07:33:17 crc kubenswrapper[4632]: E1201 07:33:17.750960 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:33:18 crc kubenswrapper[4632]: I1201 07:33:18.387080 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-bth4h/must-gather-wtlhd"] Dec 01 07:33:18 crc kubenswrapper[4632]: I1201 07:33:18.387428 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-bth4h/must-gather-wtlhd" podUID="51b93b9c-d69c-4fde-9d72-e906a394467e" containerName="copy" containerID="cri-o://af6ba0caf98df9f96e49fd377f12952a4e70524db084e762f55fdfe2421d78a0" gracePeriod=2 Dec 01 07:33:18 crc kubenswrapper[4632]: I1201 07:33:18.396304 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-bth4h/must-gather-wtlhd"] Dec 01 07:33:18 crc kubenswrapper[4632]: I1201 07:33:18.743268 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-bth4h_must-gather-wtlhd_51b93b9c-d69c-4fde-9d72-e906a394467e/copy/0.log" Dec 01 07:33:18 crc kubenswrapper[4632]: I1201 07:33:18.744041 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-bth4h/must-gather-wtlhd" Dec 01 07:33:18 crc kubenswrapper[4632]: I1201 07:33:18.923152 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwjwz\" (UniqueName: \"kubernetes.io/projected/51b93b9c-d69c-4fde-9d72-e906a394467e-kube-api-access-zwjwz\") pod \"51b93b9c-d69c-4fde-9d72-e906a394467e\" (UID: \"51b93b9c-d69c-4fde-9d72-e906a394467e\") " Dec 01 07:33:18 crc kubenswrapper[4632]: I1201 07:33:18.923468 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/51b93b9c-d69c-4fde-9d72-e906a394467e-must-gather-output\") pod \"51b93b9c-d69c-4fde-9d72-e906a394467e\" (UID: \"51b93b9c-d69c-4fde-9d72-e906a394467e\") " Dec 01 07:33:18 crc kubenswrapper[4632]: I1201 07:33:18.928966 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51b93b9c-d69c-4fde-9d72-e906a394467e-kube-api-access-zwjwz" (OuterVolumeSpecName: "kube-api-access-zwjwz") pod "51b93b9c-d69c-4fde-9d72-e906a394467e" (UID: "51b93b9c-d69c-4fde-9d72-e906a394467e"). InnerVolumeSpecName "kube-api-access-zwjwz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:33:19 crc kubenswrapper[4632]: I1201 07:33:19.030187 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwjwz\" (UniqueName: \"kubernetes.io/projected/51b93b9c-d69c-4fde-9d72-e906a394467e-kube-api-access-zwjwz\") on node \"crc\" DevicePath \"\"" Dec 01 07:33:19 crc kubenswrapper[4632]: I1201 07:33:19.037668 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51b93b9c-d69c-4fde-9d72-e906a394467e-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "51b93b9c-d69c-4fde-9d72-e906a394467e" (UID: "51b93b9c-d69c-4fde-9d72-e906a394467e"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:33:19 crc kubenswrapper[4632]: I1201 07:33:19.131916 4632 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/51b93b9c-d69c-4fde-9d72-e906a394467e-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 01 07:33:19 crc kubenswrapper[4632]: I1201 07:33:19.484577 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-bth4h_must-gather-wtlhd_51b93b9c-d69c-4fde-9d72-e906a394467e/copy/0.log" Dec 01 07:33:19 crc kubenswrapper[4632]: I1201 07:33:19.485160 4632 generic.go:334] "Generic (PLEG): container finished" podID="51b93b9c-d69c-4fde-9d72-e906a394467e" containerID="af6ba0caf98df9f96e49fd377f12952a4e70524db084e762f55fdfe2421d78a0" exitCode=143 Dec 01 07:33:19 crc kubenswrapper[4632]: I1201 07:33:19.485284 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-bth4h/must-gather-wtlhd" Dec 01 07:33:19 crc kubenswrapper[4632]: I1201 07:33:19.485423 4632 scope.go:117] "RemoveContainer" containerID="af6ba0caf98df9f96e49fd377f12952a4e70524db084e762f55fdfe2421d78a0" Dec 01 07:33:19 crc kubenswrapper[4632]: I1201 07:33:19.505403 4632 scope.go:117] "RemoveContainer" containerID="b8beffa25ca955d870471bfd73af074d225240c91724cc6677e65c6a85d1b2eb" Dec 01 07:33:19 crc kubenswrapper[4632]: I1201 07:33:19.561128 4632 scope.go:117] "RemoveContainer" containerID="af6ba0caf98df9f96e49fd377f12952a4e70524db084e762f55fdfe2421d78a0" Dec 01 07:33:19 crc kubenswrapper[4632]: E1201 07:33:19.561576 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"af6ba0caf98df9f96e49fd377f12952a4e70524db084e762f55fdfe2421d78a0\": container with ID starting with af6ba0caf98df9f96e49fd377f12952a4e70524db084e762f55fdfe2421d78a0 not found: ID does not exist" containerID="af6ba0caf98df9f96e49fd377f12952a4e70524db084e762f55fdfe2421d78a0" Dec 01 07:33:19 crc kubenswrapper[4632]: I1201 07:33:19.561710 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"af6ba0caf98df9f96e49fd377f12952a4e70524db084e762f55fdfe2421d78a0"} err="failed to get container status \"af6ba0caf98df9f96e49fd377f12952a4e70524db084e762f55fdfe2421d78a0\": rpc error: code = NotFound desc = could not find container \"af6ba0caf98df9f96e49fd377f12952a4e70524db084e762f55fdfe2421d78a0\": container with ID starting with af6ba0caf98df9f96e49fd377f12952a4e70524db084e762f55fdfe2421d78a0 not found: ID does not exist" Dec 01 07:33:19 crc kubenswrapper[4632]: I1201 07:33:19.561819 4632 scope.go:117] "RemoveContainer" containerID="b8beffa25ca955d870471bfd73af074d225240c91724cc6677e65c6a85d1b2eb" Dec 01 07:33:19 crc kubenswrapper[4632]: E1201 07:33:19.562241 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8beffa25ca955d870471bfd73af074d225240c91724cc6677e65c6a85d1b2eb\": container with ID starting with b8beffa25ca955d870471bfd73af074d225240c91724cc6677e65c6a85d1b2eb not found: ID does not exist" containerID="b8beffa25ca955d870471bfd73af074d225240c91724cc6677e65c6a85d1b2eb" Dec 01 07:33:19 crc kubenswrapper[4632]: I1201 07:33:19.562389 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8beffa25ca955d870471bfd73af074d225240c91724cc6677e65c6a85d1b2eb"} err="failed to get container status \"b8beffa25ca955d870471bfd73af074d225240c91724cc6677e65c6a85d1b2eb\": rpc error: code = NotFound desc = could not find container \"b8beffa25ca955d870471bfd73af074d225240c91724cc6677e65c6a85d1b2eb\": container with ID starting with b8beffa25ca955d870471bfd73af074d225240c91724cc6677e65c6a85d1b2eb not found: ID does not exist" Dec 01 07:33:20 crc kubenswrapper[4632]: I1201 07:33:20.759656 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51b93b9c-d69c-4fde-9d72-e906a394467e" path="/var/lib/kubelet/pods/51b93b9c-d69c-4fde-9d72-e906a394467e/volumes" Dec 01 07:33:32 crc kubenswrapper[4632]: I1201 07:33:32.750887 4632 scope.go:117] "RemoveContainer" containerID="99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689" Dec 01 07:33:32 crc kubenswrapper[4632]: E1201 07:33:32.751516 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 
5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:33:45 crc kubenswrapper[4632]: I1201 07:33:45.750230 4632 scope.go:117] "RemoveContainer" containerID="99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689" Dec 01 07:33:45 crc kubenswrapper[4632]: E1201 07:33:45.751166 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:33:57 crc kubenswrapper[4632]: I1201 07:33:57.750706 4632 scope.go:117] "RemoveContainer" containerID="99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689" Dec 01 07:33:57 crc kubenswrapper[4632]: E1201 07:33:57.751553 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:34:10 crc kubenswrapper[4632]: I1201 07:34:10.756402 4632 scope.go:117] "RemoveContainer" containerID="99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689" Dec 01 07:34:10 crc kubenswrapper[4632]: E1201 07:34:10.757277 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:34:21 crc kubenswrapper[4632]: I1201 07:34:21.750135 4632 scope.go:117] "RemoveContainer" containerID="99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689" Dec 01 07:34:21 crc kubenswrapper[4632]: E1201 07:34:21.751002 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:34:34 crc kubenswrapper[4632]: I1201 07:34:34.750989 4632 scope.go:117] "RemoveContainer" containerID="99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689" Dec 01 07:34:34 crc kubenswrapper[4632]: E1201 07:34:34.751709 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:34:49 crc kubenswrapper[4632]: I1201 07:34:49.750745 4632 scope.go:117] "RemoveContainer" containerID="99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689" Dec 01 07:34:49 crc kubenswrapper[4632]: E1201 07:34:49.751395 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:35:04 crc kubenswrapper[4632]: I1201 07:35:04.750536 4632 scope.go:117] "RemoveContainer" containerID="99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689" Dec 01 07:35:04 crc kubenswrapper[4632]: E1201 07:35:04.751332 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:35:17 crc kubenswrapper[4632]: I1201 07:35:17.750772 4632 scope.go:117] "RemoveContainer" containerID="99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689" Dec 01 07:35:17 crc kubenswrapper[4632]: E1201 07:35:17.751380 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:35:19 crc kubenswrapper[4632]: I1201 07:35:19.452441 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-t8g5z/must-gather-46pfq"] Dec 01 07:35:19 crc kubenswrapper[4632]: E1201 07:35:19.452964 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2123ef98-d1db-4b71-8657-51be9c899a23" containerName="registry-server" Dec 01 07:35:19 crc kubenswrapper[4632]: I1201 07:35:19.452977 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="2123ef98-d1db-4b71-8657-51be9c899a23" containerName="registry-server" Dec 01 07:35:19 crc kubenswrapper[4632]: E1201 07:35:19.452989 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51b93b9c-d69c-4fde-9d72-e906a394467e" containerName="gather" Dec 01 07:35:19 crc kubenswrapper[4632]: I1201 07:35:19.452995 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="51b93b9c-d69c-4fde-9d72-e906a394467e" containerName="gather" Dec 01 07:35:19 crc kubenswrapper[4632]: E1201 07:35:19.453013 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2123ef98-d1db-4b71-8657-51be9c899a23" containerName="extract-content" Dec 01 07:35:19 crc kubenswrapper[4632]: I1201 07:35:19.453019 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="2123ef98-d1db-4b71-8657-51be9c899a23" containerName="extract-content" Dec 01 07:35:19 crc kubenswrapper[4632]: E1201 07:35:19.453035 4632 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51b93b9c-d69c-4fde-9d72-e906a394467e" containerName="copy" Dec 01 07:35:19 crc kubenswrapper[4632]: I1201 07:35:19.453040 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="51b93b9c-d69c-4fde-9d72-e906a394467e" containerName="copy" Dec 01 07:35:19 crc kubenswrapper[4632]: E1201 07:35:19.453049 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2123ef98-d1db-4b71-8657-51be9c899a23" containerName="extract-utilities" Dec 01 07:35:19 crc kubenswrapper[4632]: I1201 07:35:19.453055 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="2123ef98-d1db-4b71-8657-51be9c899a23" containerName="extract-utilities" Dec 01 07:35:19 crc kubenswrapper[4632]: I1201 07:35:19.453209 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="51b93b9c-d69c-4fde-9d72-e906a394467e" containerName="copy" Dec 01 07:35:19 crc kubenswrapper[4632]: I1201 07:35:19.453233 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="51b93b9c-d69c-4fde-9d72-e906a394467e" containerName="gather" Dec 01 07:35:19 crc kubenswrapper[4632]: I1201 07:35:19.453241 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="2123ef98-d1db-4b71-8657-51be9c899a23" containerName="registry-server" Dec 01 07:35:19 crc kubenswrapper[4632]: I1201 07:35:19.454108 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-t8g5z/must-gather-46pfq" Dec 01 07:35:19 crc kubenswrapper[4632]: I1201 07:35:19.456409 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-t8g5z"/"kube-root-ca.crt" Dec 01 07:35:19 crc kubenswrapper[4632]: I1201 07:35:19.456642 4632 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-t8g5z"/"default-dockercfg-q8xw4" Dec 01 07:35:19 crc kubenswrapper[4632]: I1201 07:35:19.456843 4632 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-t8g5z"/"openshift-service-ca.crt" Dec 01 07:35:19 crc kubenswrapper[4632]: I1201 07:35:19.480789 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-t8g5z/must-gather-46pfq"] Dec 01 07:35:19 crc kubenswrapper[4632]: I1201 07:35:19.626577 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dhjw2\" (UniqueName: \"kubernetes.io/projected/6a303f25-5be2-46af-9812-5f9155a6e91c-kube-api-access-dhjw2\") pod \"must-gather-46pfq\" (UID: \"6a303f25-5be2-46af-9812-5f9155a6e91c\") " pod="openshift-must-gather-t8g5z/must-gather-46pfq" Dec 01 07:35:19 crc kubenswrapper[4632]: I1201 07:35:19.626651 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6a303f25-5be2-46af-9812-5f9155a6e91c-must-gather-output\") pod \"must-gather-46pfq\" (UID: \"6a303f25-5be2-46af-9812-5f9155a6e91c\") " pod="openshift-must-gather-t8g5z/must-gather-46pfq" Dec 01 07:35:19 crc kubenswrapper[4632]: I1201 07:35:19.729400 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dhjw2\" (UniqueName: \"kubernetes.io/projected/6a303f25-5be2-46af-9812-5f9155a6e91c-kube-api-access-dhjw2\") pod \"must-gather-46pfq\" (UID: \"6a303f25-5be2-46af-9812-5f9155a6e91c\") " pod="openshift-must-gather-t8g5z/must-gather-46pfq" Dec 01 07:35:19 crc kubenswrapper[4632]: I1201 07:35:19.729591 4632 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6a303f25-5be2-46af-9812-5f9155a6e91c-must-gather-output\") pod \"must-gather-46pfq\" (UID: \"6a303f25-5be2-46af-9812-5f9155a6e91c\") " pod="openshift-must-gather-t8g5z/must-gather-46pfq" Dec 01 07:35:19 crc kubenswrapper[4632]: I1201 07:35:19.730091 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6a303f25-5be2-46af-9812-5f9155a6e91c-must-gather-output\") pod \"must-gather-46pfq\" (UID: \"6a303f25-5be2-46af-9812-5f9155a6e91c\") " pod="openshift-must-gather-t8g5z/must-gather-46pfq" Dec 01 07:35:19 crc kubenswrapper[4632]: I1201 07:35:19.745819 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dhjw2\" (UniqueName: \"kubernetes.io/projected/6a303f25-5be2-46af-9812-5f9155a6e91c-kube-api-access-dhjw2\") pod \"must-gather-46pfq\" (UID: \"6a303f25-5be2-46af-9812-5f9155a6e91c\") " pod="openshift-must-gather-t8g5z/must-gather-46pfq" Dec 01 07:35:19 crc kubenswrapper[4632]: I1201 07:35:19.771591 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-t8g5z/must-gather-46pfq" Dec 01 07:35:20 crc kubenswrapper[4632]: I1201 07:35:20.220790 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-t8g5z/must-gather-46pfq"] Dec 01 07:35:20 crc kubenswrapper[4632]: I1201 07:35:20.387599 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-t8g5z/must-gather-46pfq" event={"ID":"6a303f25-5be2-46af-9812-5f9155a6e91c","Type":"ContainerStarted","Data":"e7831a35c272ac06ec66cd227214ba431f33d587dda1ac02ad81fa0d9187306e"} Dec 01 07:35:21 crc kubenswrapper[4632]: I1201 07:35:21.395562 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-t8g5z/must-gather-46pfq" event={"ID":"6a303f25-5be2-46af-9812-5f9155a6e91c","Type":"ContainerStarted","Data":"3fc0730268ce117279b13d7c185b5aa23ac258fe13d845bb276534ffc8354153"} Dec 01 07:35:21 crc kubenswrapper[4632]: I1201 07:35:21.395884 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-t8g5z/must-gather-46pfq" event={"ID":"6a303f25-5be2-46af-9812-5f9155a6e91c","Type":"ContainerStarted","Data":"85c0c0dac34724a01e10c4b076ced3732163e08969b1ccbaa233837c227aa2fb"} Dec 01 07:35:21 crc kubenswrapper[4632]: I1201 07:35:21.407522 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-t8g5z/must-gather-46pfq" podStartSLOduration=2.4075085019999998 podStartE2EDuration="2.407508502s" podCreationTimestamp="2025-12-01 07:35:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:35:21.406638093 +0000 UTC m=+3130.971651066" watchObservedRunningTime="2025-12-01 07:35:21.407508502 +0000 UTC m=+3130.972521476" Dec 01 07:35:23 crc kubenswrapper[4632]: I1201 07:35:23.383326 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-t8g5z/crc-debug-w25pp"] Dec 01 07:35:23 crc kubenswrapper[4632]: I1201 07:35:23.384717 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-t8g5z/crc-debug-w25pp" Dec 01 07:35:23 crc kubenswrapper[4632]: I1201 07:35:23.415329 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vn4s\" (UniqueName: \"kubernetes.io/projected/7834ed64-00bb-4f74-bef8-0bb551df5d1e-kube-api-access-9vn4s\") pod \"crc-debug-w25pp\" (UID: \"7834ed64-00bb-4f74-bef8-0bb551df5d1e\") " pod="openshift-must-gather-t8g5z/crc-debug-w25pp" Dec 01 07:35:23 crc kubenswrapper[4632]: I1201 07:35:23.415476 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7834ed64-00bb-4f74-bef8-0bb551df5d1e-host\") pod \"crc-debug-w25pp\" (UID: \"7834ed64-00bb-4f74-bef8-0bb551df5d1e\") " pod="openshift-must-gather-t8g5z/crc-debug-w25pp" Dec 01 07:35:23 crc kubenswrapper[4632]: I1201 07:35:23.517758 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vn4s\" (UniqueName: \"kubernetes.io/projected/7834ed64-00bb-4f74-bef8-0bb551df5d1e-kube-api-access-9vn4s\") pod \"crc-debug-w25pp\" (UID: \"7834ed64-00bb-4f74-bef8-0bb551df5d1e\") " pod="openshift-must-gather-t8g5z/crc-debug-w25pp" Dec 01 07:35:23 crc kubenswrapper[4632]: I1201 07:35:23.517927 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7834ed64-00bb-4f74-bef8-0bb551df5d1e-host\") pod \"crc-debug-w25pp\" (UID: \"7834ed64-00bb-4f74-bef8-0bb551df5d1e\") " pod="openshift-must-gather-t8g5z/crc-debug-w25pp" Dec 01 07:35:23 crc kubenswrapper[4632]: I1201 07:35:23.518128 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7834ed64-00bb-4f74-bef8-0bb551df5d1e-host\") pod \"crc-debug-w25pp\" (UID: \"7834ed64-00bb-4f74-bef8-0bb551df5d1e\") " pod="openshift-must-gather-t8g5z/crc-debug-w25pp" Dec 01 07:35:23 crc kubenswrapper[4632]: I1201 07:35:23.558014 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vn4s\" (UniqueName: \"kubernetes.io/projected/7834ed64-00bb-4f74-bef8-0bb551df5d1e-kube-api-access-9vn4s\") pod \"crc-debug-w25pp\" (UID: \"7834ed64-00bb-4f74-bef8-0bb551df5d1e\") " pod="openshift-must-gather-t8g5z/crc-debug-w25pp" Dec 01 07:35:23 crc kubenswrapper[4632]: I1201 07:35:23.704969 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-t8g5z/crc-debug-w25pp" Dec 01 07:35:24 crc kubenswrapper[4632]: I1201 07:35:24.420540 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-t8g5z/crc-debug-w25pp" event={"ID":"7834ed64-00bb-4f74-bef8-0bb551df5d1e","Type":"ContainerStarted","Data":"f1068111dfb555d3aa3524ce37bcef8023cd537e5d8da810a0feaacea433107e"} Dec 01 07:35:24 crc kubenswrapper[4632]: I1201 07:35:24.421256 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-t8g5z/crc-debug-w25pp" event={"ID":"7834ed64-00bb-4f74-bef8-0bb551df5d1e","Type":"ContainerStarted","Data":"5607a8b818d7ac857cfe6db0d472922928d753c400f4dbf8eea09de24d818be8"} Dec 01 07:35:24 crc kubenswrapper[4632]: I1201 07:35:24.434377 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-t8g5z/crc-debug-w25pp" podStartSLOduration=1.434339562 podStartE2EDuration="1.434339562s" podCreationTimestamp="2025-12-01 07:35:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-01 07:35:24.430800472 +0000 UTC m=+3133.995813436" watchObservedRunningTime="2025-12-01 07:35:24.434339562 +0000 UTC m=+3133.999352534" Dec 01 07:35:30 crc kubenswrapper[4632]: I1201 07:35:30.756576 4632 scope.go:117] "RemoveContainer" containerID="99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689" Dec 01 07:35:30 crc kubenswrapper[4632]: E1201 07:35:30.760114 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:35:43 crc kubenswrapper[4632]: I1201 07:35:43.750500 4632 scope.go:117] "RemoveContainer" containerID="99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689" Dec 01 07:35:43 crc kubenswrapper[4632]: E1201 07:35:43.751219 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:35:49 crc kubenswrapper[4632]: I1201 07:35:49.617935 4632 generic.go:334] "Generic (PLEG): container finished" podID="7834ed64-00bb-4f74-bef8-0bb551df5d1e" containerID="f1068111dfb555d3aa3524ce37bcef8023cd537e5d8da810a0feaacea433107e" exitCode=0 Dec 01 07:35:49 crc kubenswrapper[4632]: I1201 07:35:49.618034 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-t8g5z/crc-debug-w25pp" event={"ID":"7834ed64-00bb-4f74-bef8-0bb551df5d1e","Type":"ContainerDied","Data":"f1068111dfb555d3aa3524ce37bcef8023cd537e5d8da810a0feaacea433107e"} Dec 01 07:35:50 crc kubenswrapper[4632]: I1201 07:35:50.709258 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-t8g5z/crc-debug-w25pp" Dec 01 07:35:50 crc kubenswrapper[4632]: I1201 07:35:50.736315 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-t8g5z/crc-debug-w25pp"] Dec 01 07:35:50 crc kubenswrapper[4632]: I1201 07:35:50.742037 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-t8g5z/crc-debug-w25pp"] Dec 01 07:35:50 crc kubenswrapper[4632]: I1201 07:35:50.880713 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7834ed64-00bb-4f74-bef8-0bb551df5d1e-host\") pod \"7834ed64-00bb-4f74-bef8-0bb551df5d1e\" (UID: \"7834ed64-00bb-4f74-bef8-0bb551df5d1e\") " Dec 01 07:35:50 crc kubenswrapper[4632]: I1201 07:35:50.880780 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9vn4s\" (UniqueName: \"kubernetes.io/projected/7834ed64-00bb-4f74-bef8-0bb551df5d1e-kube-api-access-9vn4s\") pod \"7834ed64-00bb-4f74-bef8-0bb551df5d1e\" (UID: \"7834ed64-00bb-4f74-bef8-0bb551df5d1e\") " Dec 01 07:35:50 crc kubenswrapper[4632]: I1201 07:35:50.880846 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7834ed64-00bb-4f74-bef8-0bb551df5d1e-host" (OuterVolumeSpecName: "host") pod "7834ed64-00bb-4f74-bef8-0bb551df5d1e" (UID: "7834ed64-00bb-4f74-bef8-0bb551df5d1e"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:35:50 crc kubenswrapper[4632]: I1201 07:35:50.881771 4632 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7834ed64-00bb-4f74-bef8-0bb551df5d1e-host\") on node \"crc\" DevicePath \"\"" Dec 01 07:35:50 crc kubenswrapper[4632]: I1201 07:35:50.887026 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7834ed64-00bb-4f74-bef8-0bb551df5d1e-kube-api-access-9vn4s" (OuterVolumeSpecName: "kube-api-access-9vn4s") pod "7834ed64-00bb-4f74-bef8-0bb551df5d1e" (UID: "7834ed64-00bb-4f74-bef8-0bb551df5d1e"). InnerVolumeSpecName "kube-api-access-9vn4s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:35:50 crc kubenswrapper[4632]: I1201 07:35:50.984607 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9vn4s\" (UniqueName: \"kubernetes.io/projected/7834ed64-00bb-4f74-bef8-0bb551df5d1e-kube-api-access-9vn4s\") on node \"crc\" DevicePath \"\"" Dec 01 07:35:51 crc kubenswrapper[4632]: I1201 07:35:51.642102 4632 scope.go:117] "RemoveContainer" containerID="f1068111dfb555d3aa3524ce37bcef8023cd537e5d8da810a0feaacea433107e" Dec 01 07:35:51 crc kubenswrapper[4632]: I1201 07:35:51.642201 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-t8g5z/crc-debug-w25pp" Dec 01 07:35:51 crc kubenswrapper[4632]: I1201 07:35:51.908989 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-t8g5z/crc-debug-blgrj"] Dec 01 07:35:51 crc kubenswrapper[4632]: E1201 07:35:51.909549 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7834ed64-00bb-4f74-bef8-0bb551df5d1e" containerName="container-00" Dec 01 07:35:51 crc kubenswrapper[4632]: I1201 07:35:51.909563 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="7834ed64-00bb-4f74-bef8-0bb551df5d1e" containerName="container-00" Dec 01 07:35:51 crc kubenswrapper[4632]: I1201 07:35:51.909793 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="7834ed64-00bb-4f74-bef8-0bb551df5d1e" containerName="container-00" Dec 01 07:35:51 crc kubenswrapper[4632]: I1201 07:35:51.910506 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-t8g5z/crc-debug-blgrj" Dec 01 07:35:52 crc kubenswrapper[4632]: I1201 07:35:52.010385 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/29d3ac18-171e-4e6a-af2e-6cd01b8f1d85-host\") pod \"crc-debug-blgrj\" (UID: \"29d3ac18-171e-4e6a-af2e-6cd01b8f1d85\") " pod="openshift-must-gather-t8g5z/crc-debug-blgrj" Dec 01 07:35:52 crc kubenswrapper[4632]: I1201 07:35:52.010540 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jmb6\" (UniqueName: \"kubernetes.io/projected/29d3ac18-171e-4e6a-af2e-6cd01b8f1d85-kube-api-access-5jmb6\") pod \"crc-debug-blgrj\" (UID: \"29d3ac18-171e-4e6a-af2e-6cd01b8f1d85\") " pod="openshift-must-gather-t8g5z/crc-debug-blgrj" Dec 01 07:35:52 crc kubenswrapper[4632]: I1201 07:35:52.112824 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/29d3ac18-171e-4e6a-af2e-6cd01b8f1d85-host\") pod \"crc-debug-blgrj\" (UID: \"29d3ac18-171e-4e6a-af2e-6cd01b8f1d85\") " pod="openshift-must-gather-t8g5z/crc-debug-blgrj" Dec 01 07:35:52 crc kubenswrapper[4632]: I1201 07:35:52.112918 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jmb6\" (UniqueName: \"kubernetes.io/projected/29d3ac18-171e-4e6a-af2e-6cd01b8f1d85-kube-api-access-5jmb6\") pod \"crc-debug-blgrj\" (UID: \"29d3ac18-171e-4e6a-af2e-6cd01b8f1d85\") " pod="openshift-must-gather-t8g5z/crc-debug-blgrj" Dec 01 07:35:52 crc kubenswrapper[4632]: I1201 07:35:52.112970 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/29d3ac18-171e-4e6a-af2e-6cd01b8f1d85-host\") pod \"crc-debug-blgrj\" (UID: \"29d3ac18-171e-4e6a-af2e-6cd01b8f1d85\") " pod="openshift-must-gather-t8g5z/crc-debug-blgrj" Dec 01 07:35:52 crc kubenswrapper[4632]: I1201 07:35:52.130274 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jmb6\" (UniqueName: \"kubernetes.io/projected/29d3ac18-171e-4e6a-af2e-6cd01b8f1d85-kube-api-access-5jmb6\") pod \"crc-debug-blgrj\" (UID: \"29d3ac18-171e-4e6a-af2e-6cd01b8f1d85\") " pod="openshift-must-gather-t8g5z/crc-debug-blgrj" Dec 01 07:35:52 crc kubenswrapper[4632]: I1201 07:35:52.228101 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-t8g5z/crc-debug-blgrj" Dec 01 07:35:52 crc kubenswrapper[4632]: I1201 07:35:52.650609 4632 generic.go:334] "Generic (PLEG): container finished" podID="29d3ac18-171e-4e6a-af2e-6cd01b8f1d85" containerID="ad3d6a0a0e88cf95a237e8f79577e25ffd77344b39db66738049cdcad16ed61b" exitCode=0 Dec 01 07:35:52 crc kubenswrapper[4632]: I1201 07:35:52.650685 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-t8g5z/crc-debug-blgrj" event={"ID":"29d3ac18-171e-4e6a-af2e-6cd01b8f1d85","Type":"ContainerDied","Data":"ad3d6a0a0e88cf95a237e8f79577e25ffd77344b39db66738049cdcad16ed61b"} Dec 01 07:35:52 crc kubenswrapper[4632]: I1201 07:35:52.650747 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-t8g5z/crc-debug-blgrj" event={"ID":"29d3ac18-171e-4e6a-af2e-6cd01b8f1d85","Type":"ContainerStarted","Data":"729c6c5d0b61e3c2e07b92cc03d6eb11fcc92bafeef67f0872344fcbc75b6b14"} Dec 01 07:35:52 crc kubenswrapper[4632]: I1201 07:35:52.760328 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7834ed64-00bb-4f74-bef8-0bb551df5d1e" path="/var/lib/kubelet/pods/7834ed64-00bb-4f74-bef8-0bb551df5d1e/volumes" Dec 01 07:35:53 crc kubenswrapper[4632]: I1201 07:35:53.068146 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-t8g5z/crc-debug-blgrj"] Dec 01 07:35:53 crc kubenswrapper[4632]: I1201 07:35:53.075068 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-t8g5z/crc-debug-blgrj"] Dec 01 07:35:53 crc kubenswrapper[4632]: I1201 07:35:53.734140 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-t8g5z/crc-debug-blgrj" Dec 01 07:35:53 crc kubenswrapper[4632]: I1201 07:35:53.745084 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/29d3ac18-171e-4e6a-af2e-6cd01b8f1d85-host\") pod \"29d3ac18-171e-4e6a-af2e-6cd01b8f1d85\" (UID: \"29d3ac18-171e-4e6a-af2e-6cd01b8f1d85\") " Dec 01 07:35:53 crc kubenswrapper[4632]: I1201 07:35:53.745280 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29d3ac18-171e-4e6a-af2e-6cd01b8f1d85-host" (OuterVolumeSpecName: "host") pod "29d3ac18-171e-4e6a-af2e-6cd01b8f1d85" (UID: "29d3ac18-171e-4e6a-af2e-6cd01b8f1d85"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:35:53 crc kubenswrapper[4632]: I1201 07:35:53.745329 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5jmb6\" (UniqueName: \"kubernetes.io/projected/29d3ac18-171e-4e6a-af2e-6cd01b8f1d85-kube-api-access-5jmb6\") pod \"29d3ac18-171e-4e6a-af2e-6cd01b8f1d85\" (UID: \"29d3ac18-171e-4e6a-af2e-6cd01b8f1d85\") " Dec 01 07:35:53 crc kubenswrapper[4632]: I1201 07:35:53.746136 4632 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/29d3ac18-171e-4e6a-af2e-6cd01b8f1d85-host\") on node \"crc\" DevicePath \"\"" Dec 01 07:35:53 crc kubenswrapper[4632]: I1201 07:35:53.756594 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29d3ac18-171e-4e6a-af2e-6cd01b8f1d85-kube-api-access-5jmb6" (OuterVolumeSpecName: "kube-api-access-5jmb6") pod "29d3ac18-171e-4e6a-af2e-6cd01b8f1d85" (UID: "29d3ac18-171e-4e6a-af2e-6cd01b8f1d85"). InnerVolumeSpecName "kube-api-access-5jmb6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:35:53 crc kubenswrapper[4632]: I1201 07:35:53.848034 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5jmb6\" (UniqueName: \"kubernetes.io/projected/29d3ac18-171e-4e6a-af2e-6cd01b8f1d85-kube-api-access-5jmb6\") on node \"crc\" DevicePath \"\"" Dec 01 07:35:54 crc kubenswrapper[4632]: I1201 07:35:54.237901 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-t8g5z/crc-debug-5g6c2"] Dec 01 07:35:54 crc kubenswrapper[4632]: E1201 07:35:54.238533 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29d3ac18-171e-4e6a-af2e-6cd01b8f1d85" containerName="container-00" Dec 01 07:35:54 crc kubenswrapper[4632]: I1201 07:35:54.238554 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="29d3ac18-171e-4e6a-af2e-6cd01b8f1d85" containerName="container-00" Dec 01 07:35:54 crc kubenswrapper[4632]: I1201 07:35:54.238793 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="29d3ac18-171e-4e6a-af2e-6cd01b8f1d85" containerName="container-00" Dec 01 07:35:54 crc kubenswrapper[4632]: I1201 07:35:54.239650 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-t8g5z/crc-debug-5g6c2" Dec 01 07:35:54 crc kubenswrapper[4632]: I1201 07:35:54.257412 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqvxn\" (UniqueName: \"kubernetes.io/projected/91cadb4a-922a-4402-b7f2-30897f6fa8fe-kube-api-access-zqvxn\") pod \"crc-debug-5g6c2\" (UID: \"91cadb4a-922a-4402-b7f2-30897f6fa8fe\") " pod="openshift-must-gather-t8g5z/crc-debug-5g6c2" Dec 01 07:35:54 crc kubenswrapper[4632]: I1201 07:35:54.257680 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/91cadb4a-922a-4402-b7f2-30897f6fa8fe-host\") pod \"crc-debug-5g6c2\" (UID: \"91cadb4a-922a-4402-b7f2-30897f6fa8fe\") " pod="openshift-must-gather-t8g5z/crc-debug-5g6c2" Dec 01 07:35:54 crc kubenswrapper[4632]: I1201 07:35:54.359957 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqvxn\" (UniqueName: \"kubernetes.io/projected/91cadb4a-922a-4402-b7f2-30897f6fa8fe-kube-api-access-zqvxn\") pod \"crc-debug-5g6c2\" (UID: \"91cadb4a-922a-4402-b7f2-30897f6fa8fe\") " pod="openshift-must-gather-t8g5z/crc-debug-5g6c2" Dec 01 07:35:54 crc kubenswrapper[4632]: I1201 07:35:54.360148 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/91cadb4a-922a-4402-b7f2-30897f6fa8fe-host\") pod \"crc-debug-5g6c2\" (UID: \"91cadb4a-922a-4402-b7f2-30897f6fa8fe\") " pod="openshift-must-gather-t8g5z/crc-debug-5g6c2" Dec 01 07:35:54 crc kubenswrapper[4632]: I1201 07:35:54.360239 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/91cadb4a-922a-4402-b7f2-30897f6fa8fe-host\") pod \"crc-debug-5g6c2\" (UID: \"91cadb4a-922a-4402-b7f2-30897f6fa8fe\") " pod="openshift-must-gather-t8g5z/crc-debug-5g6c2" Dec 01 07:35:54 crc kubenswrapper[4632]: I1201 07:35:54.379200 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqvxn\" (UniqueName: \"kubernetes.io/projected/91cadb4a-922a-4402-b7f2-30897f6fa8fe-kube-api-access-zqvxn\") pod \"crc-debug-5g6c2\" (UID: \"91cadb4a-922a-4402-b7f2-30897f6fa8fe\") " 
pod="openshift-must-gather-t8g5z/crc-debug-5g6c2" Dec 01 07:35:54 crc kubenswrapper[4632]: I1201 07:35:54.555589 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-t8g5z/crc-debug-5g6c2" Dec 01 07:35:54 crc kubenswrapper[4632]: I1201 07:35:54.668443 4632 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="729c6c5d0b61e3c2e07b92cc03d6eb11fcc92bafeef67f0872344fcbc75b6b14" Dec 01 07:35:54 crc kubenswrapper[4632]: I1201 07:35:54.668646 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-t8g5z/crc-debug-blgrj" Dec 01 07:35:54 crc kubenswrapper[4632]: I1201 07:35:54.669744 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-t8g5z/crc-debug-5g6c2" event={"ID":"91cadb4a-922a-4402-b7f2-30897f6fa8fe","Type":"ContainerStarted","Data":"f0787da08cd53fef410c05aa5a1ff38ff6ea92968d193a98c5062c9f841d174b"} Dec 01 07:35:54 crc kubenswrapper[4632]: I1201 07:35:54.765630 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29d3ac18-171e-4e6a-af2e-6cd01b8f1d85" path="/var/lib/kubelet/pods/29d3ac18-171e-4e6a-af2e-6cd01b8f1d85/volumes" Dec 01 07:35:55 crc kubenswrapper[4632]: I1201 07:35:55.679810 4632 generic.go:334] "Generic (PLEG): container finished" podID="91cadb4a-922a-4402-b7f2-30897f6fa8fe" containerID="e44d568fb2ad405161abc61ab7639b8fe6413d2cb352d33f777ff8fe63722150" exitCode=0 Dec 01 07:35:55 crc kubenswrapper[4632]: I1201 07:35:55.679860 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-t8g5z/crc-debug-5g6c2" event={"ID":"91cadb4a-922a-4402-b7f2-30897f6fa8fe","Type":"ContainerDied","Data":"e44d568fb2ad405161abc61ab7639b8fe6413d2cb352d33f777ff8fe63722150"} Dec 01 07:35:55 crc kubenswrapper[4632]: I1201 07:35:55.721720 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-t8g5z/crc-debug-5g6c2"] Dec 01 07:35:55 crc kubenswrapper[4632]: I1201 07:35:55.729920 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-t8g5z/crc-debug-5g6c2"] Dec 01 07:35:55 crc kubenswrapper[4632]: I1201 07:35:55.750342 4632 scope.go:117] "RemoveContainer" containerID="99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689" Dec 01 07:35:55 crc kubenswrapper[4632]: E1201 07:35:55.750728 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:35:56 crc kubenswrapper[4632]: I1201 07:35:56.758955 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-t8g5z/crc-debug-5g6c2" Dec 01 07:35:56 crc kubenswrapper[4632]: I1201 07:35:56.908852 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/91cadb4a-922a-4402-b7f2-30897f6fa8fe-host\") pod \"91cadb4a-922a-4402-b7f2-30897f6fa8fe\" (UID: \"91cadb4a-922a-4402-b7f2-30897f6fa8fe\") " Dec 01 07:35:56 crc kubenswrapper[4632]: I1201 07:35:56.909003 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zqvxn\" (UniqueName: \"kubernetes.io/projected/91cadb4a-922a-4402-b7f2-30897f6fa8fe-kube-api-access-zqvxn\") pod \"91cadb4a-922a-4402-b7f2-30897f6fa8fe\" (UID: \"91cadb4a-922a-4402-b7f2-30897f6fa8fe\") " Dec 01 07:35:56 crc kubenswrapper[4632]: I1201 07:35:56.908993 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/91cadb4a-922a-4402-b7f2-30897f6fa8fe-host" (OuterVolumeSpecName: "host") pod "91cadb4a-922a-4402-b7f2-30897f6fa8fe" (UID: "91cadb4a-922a-4402-b7f2-30897f6fa8fe"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 01 07:35:56 crc kubenswrapper[4632]: I1201 07:35:56.909604 4632 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/91cadb4a-922a-4402-b7f2-30897f6fa8fe-host\") on node \"crc\" DevicePath \"\"" Dec 01 07:35:56 crc kubenswrapper[4632]: I1201 07:35:56.914334 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91cadb4a-922a-4402-b7f2-30897f6fa8fe-kube-api-access-zqvxn" (OuterVolumeSpecName: "kube-api-access-zqvxn") pod "91cadb4a-922a-4402-b7f2-30897f6fa8fe" (UID: "91cadb4a-922a-4402-b7f2-30897f6fa8fe"). InnerVolumeSpecName "kube-api-access-zqvxn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:35:57 crc kubenswrapper[4632]: I1201 07:35:57.011518 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zqvxn\" (UniqueName: \"kubernetes.io/projected/91cadb4a-922a-4402-b7f2-30897f6fa8fe-kube-api-access-zqvxn\") on node \"crc\" DevicePath \"\"" Dec 01 07:35:57 crc kubenswrapper[4632]: I1201 07:35:57.697113 4632 scope.go:117] "RemoveContainer" containerID="e44d568fb2ad405161abc61ab7639b8fe6413d2cb352d33f777ff8fe63722150" Dec 01 07:35:57 crc kubenswrapper[4632]: I1201 07:35:57.697130 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-t8g5z/crc-debug-5g6c2" Dec 01 07:35:58 crc kubenswrapper[4632]: I1201 07:35:58.757475 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91cadb4a-922a-4402-b7f2-30897f6fa8fe" path="/var/lib/kubelet/pods/91cadb4a-922a-4402-b7f2-30897f6fa8fe/volumes" Dec 01 07:36:09 crc kubenswrapper[4632]: I1201 07:36:09.750520 4632 scope.go:117] "RemoveContainer" containerID="99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689" Dec 01 07:36:09 crc kubenswrapper[4632]: E1201 07:36:09.751236 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:36:15 crc kubenswrapper[4632]: I1201 07:36:15.844652 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5cb7c5c954-lttm2_d67e2874-71ae-4106-80fd-5361439b8ea5/barbican-api/0.log" Dec 01 07:36:15 crc kubenswrapper[4632]: I1201 07:36:15.933852 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5cb7c5c954-lttm2_d67e2874-71ae-4106-80fd-5361439b8ea5/barbican-api-log/0.log" Dec 01 07:36:16 crc kubenswrapper[4632]: I1201 07:36:16.005526 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-58b4798-zm5jf_16fb6740-33c0-4a6d-8711-34f7520087a5/barbican-keystone-listener/0.log" Dec 01 07:36:16 crc kubenswrapper[4632]: I1201 07:36:16.024755 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-58b4798-zm5jf_16fb6740-33c0-4a6d-8711-34f7520087a5/barbican-keystone-listener-log/0.log" Dec 01 07:36:16 crc kubenswrapper[4632]: I1201 07:36:16.136160 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5cffc97f9c-677mp_aec57a49-c244-4fad-81c2-b29649e62945/barbican-worker/0.log" Dec 01 07:36:16 crc kubenswrapper[4632]: I1201 07:36:16.178290 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5cffc97f9c-677mp_aec57a49-c244-4fad-81c2-b29649e62945/barbican-worker-log/0.log" Dec 01 07:36:16 crc kubenswrapper[4632]: I1201 07:36:16.306663 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-hqn5w_0fa69e48-53f9-4bb5-9e11-a9afde0d8912/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 07:36:16 crc kubenswrapper[4632]: I1201 07:36:16.371594 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9/ceilometer-central-agent/0.log" Dec 01 07:36:16 crc kubenswrapper[4632]: I1201 07:36:16.412398 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9/ceilometer-notification-agent/0.log" Dec 01 07:36:16 crc kubenswrapper[4632]: I1201 07:36:16.477195 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9/proxy-httpd/0.log" Dec 01 07:36:16 crc kubenswrapper[4632]: I1201 07:36:16.513962 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_1c26be38-a4fd-4756-ab66-7f0ef9d8cbc9/sg-core/0.log" Dec 01 
Dec 01 07:36:16 crc kubenswrapper[4632]: I1201 07:36:16.649305 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_0fe0cba0-4ef0-4e11-8f54-29fa4b4518b3/cinder-api-log/0.log"
Dec 01 07:36:16 crc kubenswrapper[4632]: I1201 07:36:16.785636 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_08847455-e239-4d88-ba2d-0e17255fcaa3/probe/0.log"
Dec 01 07:36:16 crc kubenswrapper[4632]: I1201 07:36:16.793334 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_08847455-e239-4d88-ba2d-0e17255fcaa3/cinder-scheduler/0.log"
Dec 01 07:36:16 crc kubenswrapper[4632]: I1201 07:36:16.931455 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-6p5pw_3cb46d14-bcaa-4c50-99c0-5d6693557f5d/configure-network-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 01 07:36:16 crc kubenswrapper[4632]: I1201 07:36:16.961961 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-bp886_c30c10f4-9c67-4caf-8858-a2e74307ee33/configure-os-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 01 07:36:17 crc kubenswrapper[4632]: I1201 07:36:17.099670 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6cd8587fff-2wtcz_940ceb01-c88a-4012-bd18-d87ef90d7549/init/0.log"
Dec 01 07:36:17 crc kubenswrapper[4632]: I1201 07:36:17.225324 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6cd8587fff-2wtcz_940ceb01-c88a-4012-bd18-d87ef90d7549/init/0.log"
Dec 01 07:36:17 crc kubenswrapper[4632]: I1201 07:36:17.263172 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6cd8587fff-2wtcz_940ceb01-c88a-4012-bd18-d87ef90d7549/dnsmasq-dns/0.log"
Dec 01 07:36:17 crc kubenswrapper[4632]: I1201 07:36:17.279669 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-l89fl_6f79d230-5b05-468d-bf19-bb6a792c6b5d/download-cache-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 01 07:36:17 crc kubenswrapper[4632]: I1201 07:36:17.440156 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_e8ac0c6b-4bf6-4259-bad6-9c0620047334/glance-httpd/0.log"
Dec 01 07:36:17 crc kubenswrapper[4632]: I1201 07:36:17.467040 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_e8ac0c6b-4bf6-4259-bad6-9c0620047334/glance-log/0.log"
Dec 01 07:36:17 crc kubenswrapper[4632]: I1201 07:36:17.616199 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_3524161d-d124-4b04-9b25-73e2e3188c7f/glance-log/0.log"
Dec 01 07:36:17 crc kubenswrapper[4632]: I1201 07:36:17.652570 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_3524161d-d124-4b04-9b25-73e2e3188c7f/glance-httpd/0.log"
Dec 01 07:36:17 crc kubenswrapper[4632]: I1201 07:36:17.668481 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-44xkb_252ea9f1-a749-4524-9a53-dffbad624ea7/install-certs-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 01 07:36:17 crc kubenswrapper[4632]: I1201 07:36:17.810815 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-9frjq_2421bc45-8d08-4634-861d-e3f185b01e54/install-os-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 01 07:36:17 crc kubenswrapper[4632]: I1201 07:36:17.963540 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-5d8c896fc4-b4shb_ebd84a7e-560e-4bc0-b3e7-2f2c0843d789/keystone-api/0.log"
Dec 01 07:36:17 crc kubenswrapper[4632]: I1201 07:36:17.992280 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29409541-h825b_9b87ff38-8ea4-4e1e-8553-aa3069f0223a/keystone-cron/0.log"
Dec 01 07:36:18 crc kubenswrapper[4632]: I1201 07:36:18.135722 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_7ada4ec3-07bb-43f8-9d48-30d2075314d0/kube-state-metrics/0.log"
Dec 01 07:36:18 crc kubenswrapper[4632]: I1201 07:36:18.240193 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-ll2vq_a2462a74-2ab0-47cc-9bed-77ce67b0a6c5/libvirt-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 01 07:36:18 crc kubenswrapper[4632]: I1201 07:36:18.502223 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-594d88dfbf-66tbw_dd0f52ca-bba5-4410-9473-ac86c9839cf6/neutron-httpd/0.log"
Dec 01 07:36:18 crc kubenswrapper[4632]: I1201 07:36:18.609939 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-594d88dfbf-66tbw_dd0f52ca-bba5-4410-9473-ac86c9839cf6/neutron-api/0.log"
Dec 01 07:36:18 crc kubenswrapper[4632]: I1201 07:36:18.652864 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-dpzmd_1ca8649a-8e09-4edd-9f1f-72277996e08d/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 01 07:36:19 crc kubenswrapper[4632]: I1201 07:36:19.083888 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_9565ad41-8a6b-461d-8299-e8fe256d30eb/nova-api-log/0.log"
Dec 01 07:36:19 crc kubenswrapper[4632]: I1201 07:36:19.111439 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_a3304516-deb9-4715-a501-c0b1dbb89945/nova-cell0-conductor-conductor/0.log"
Dec 01 07:36:19 crc kubenswrapper[4632]: I1201 07:36:19.322820 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_9565ad41-8a6b-461d-8299-e8fe256d30eb/nova-api-api/0.log"
Dec 01 07:36:19 crc kubenswrapper[4632]: I1201 07:36:19.431138 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_da84cfe8-1321-40a1-a05b-14194e1e7d48/nova-cell1-conductor-conductor/0.log"
Dec 01 07:36:19 crc kubenswrapper[4632]: I1201 07:36:19.434923 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_c9c1ad02-ff5b-4f40-8e92-00ceb5acc4fc/nova-cell1-novncproxy-novncproxy/0.log"
Dec 01 07:36:19 crc kubenswrapper[4632]: I1201 07:36:19.566956 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-t9nbd_a4cb69cd-b4b1-4f58-9553-27564432b39c/nova-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 01 07:36:19 crc kubenswrapper[4632]: I1201 07:36:19.837449 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_8beb9f92-1512-4843-a060-e7407372d147/nova-metadata-log/0.log"
Dec 01 07:36:20 crc
kubenswrapper[4632]: I1201 07:36:20.022142 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_d73ee8ba-1384-40b9-bbe8-62425cd044db/mysql-bootstrap/0.log" Dec 01 07:36:20 crc kubenswrapper[4632]: I1201 07:36:20.143889 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_ec63fa13-7856-421c-ab7f-7281a42a6d67/nova-scheduler-scheduler/0.log" Dec 01 07:36:20 crc kubenswrapper[4632]: I1201 07:36:20.199574 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_d73ee8ba-1384-40b9-bbe8-62425cd044db/mysql-bootstrap/0.log" Dec 01 07:36:20 crc kubenswrapper[4632]: I1201 07:36:20.248332 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_d73ee8ba-1384-40b9-bbe8-62425cd044db/galera/0.log" Dec 01 07:36:20 crc kubenswrapper[4632]: I1201 07:36:20.412454 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_ecd36c5b-96fc-49af-b8f2-634fcf854cfa/mysql-bootstrap/0.log" Dec 01 07:36:20 crc kubenswrapper[4632]: I1201 07:36:20.651844 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_ecd36c5b-96fc-49af-b8f2-634fcf854cfa/galera/0.log" Dec 01 07:36:20 crc kubenswrapper[4632]: I1201 07:36:20.679974 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_ecd36c5b-96fc-49af-b8f2-634fcf854cfa/mysql-bootstrap/0.log" Dec 01 07:36:20 crc kubenswrapper[4632]: I1201 07:36:20.847617 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_c29a6239-304a-4a40-8e32-35dfb513bb8f/openstackclient/0.log" Dec 01 07:36:20 crc kubenswrapper[4632]: I1201 07:36:20.866265 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_8beb9f92-1512-4843-a060-e7407372d147/nova-metadata-metadata/0.log" Dec 01 07:36:20 crc kubenswrapper[4632]: I1201 07:36:20.936278 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-hw5x4_fe332539-435d-44e0-bcf5-c47332ed1e55/ovn-controller/0.log" Dec 01 07:36:21 crc kubenswrapper[4632]: I1201 07:36:21.067983 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-qkjhz_e2d9748c-3d24-43dd-a125-3a20cfe296e2/openstack-network-exporter/0.log" Dec 01 07:36:21 crc kubenswrapper[4632]: I1201 07:36:21.131619 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-rx6vw_e22d1e7d-958f-4e02-911b-31f513dd9802/ovsdb-server-init/0.log" Dec 01 07:36:21 crc kubenswrapper[4632]: I1201 07:36:21.303575 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-rx6vw_e22d1e7d-958f-4e02-911b-31f513dd9802/ovsdb-server-init/0.log" Dec 01 07:36:21 crc kubenswrapper[4632]: I1201 07:36:21.311758 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-rx6vw_e22d1e7d-958f-4e02-911b-31f513dd9802/ovs-vswitchd/0.log" Dec 01 07:36:21 crc kubenswrapper[4632]: I1201 07:36:21.332262 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-rx6vw_e22d1e7d-958f-4e02-911b-31f513dd9802/ovsdb-server/0.log" Dec 01 07:36:21 crc kubenswrapper[4632]: I1201 07:36:21.504586 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_0182dc49-3707-4d2e-a867-5eb37db588f8/openstack-network-exporter/0.log" Dec 01 07:36:21 crc kubenswrapper[4632]: I1201 
07:36:21.528121 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-r4jjj_a1e089cc-f5f6-476a-af14-d25cd1150efd/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 07:36:21 crc kubenswrapper[4632]: I1201 07:36:21.538456 4632 scope.go:117] "RemoveContainer" containerID="66f7c71611d27844b52a567cb80f2eb99d595dd4097188a14435e95454e7dff3" Dec 01 07:36:21 crc kubenswrapper[4632]: I1201 07:36:21.642079 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_0182dc49-3707-4d2e-a867-5eb37db588f8/ovn-northd/0.log" Dec 01 07:36:21 crc kubenswrapper[4632]: I1201 07:36:21.708171 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_f0bb9103-afbb-45ea-9427-c4925dd007c9/ovsdbserver-nb/0.log" Dec 01 07:36:21 crc kubenswrapper[4632]: I1201 07:36:21.753494 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_f0bb9103-afbb-45ea-9427-c4925dd007c9/openstack-network-exporter/0.log" Dec 01 07:36:21 crc kubenswrapper[4632]: I1201 07:36:21.874518 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_c5f52c6d-0a10-43a9-84f8-940c156f3278/openstack-network-exporter/0.log" Dec 01 07:36:21 crc kubenswrapper[4632]: I1201 07:36:21.953638 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_c5f52c6d-0a10-43a9-84f8-940c156f3278/ovsdbserver-sb/0.log" Dec 01 07:36:22 crc kubenswrapper[4632]: I1201 07:36:22.072169 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-8445898876-gptmm_0f2d46d4-637f-441b-8710-f1d82d8a0c11/placement-api/0.log" Dec 01 07:36:22 crc kubenswrapper[4632]: I1201 07:36:22.136547 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_6fa564e2-c2c4-41f0-aa84-1431574a0a4b/setup-container/0.log" Dec 01 07:36:22 crc kubenswrapper[4632]: I1201 07:36:22.183024 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-8445898876-gptmm_0f2d46d4-637f-441b-8710-f1d82d8a0c11/placement-log/0.log" Dec 01 07:36:22 crc kubenswrapper[4632]: I1201 07:36:22.336337 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_6fa564e2-c2c4-41f0-aa84-1431574a0a4b/setup-container/0.log" Dec 01 07:36:22 crc kubenswrapper[4632]: I1201 07:36:22.347381 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_6fa564e2-c2c4-41f0-aa84-1431574a0a4b/rabbitmq/0.log" Dec 01 07:36:22 crc kubenswrapper[4632]: I1201 07:36:22.432175 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_02ca2059-fc5b-4d54-886b-e6de4f303d3b/setup-container/0.log" Dec 01 07:36:22 crc kubenswrapper[4632]: I1201 07:36:22.595507 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_02ca2059-fc5b-4d54-886b-e6de4f303d3b/rabbitmq/0.log" Dec 01 07:36:22 crc kubenswrapper[4632]: I1201 07:36:22.607048 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_02ca2059-fc5b-4d54-886b-e6de4f303d3b/setup-container/0.log" Dec 01 07:36:22 crc kubenswrapper[4632]: I1201 07:36:22.639145 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-djnvx_c9a462bc-acd6-4d48-b78b-3584fdb57851/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 07:36:22 crc kubenswrapper[4632]: I1201 
07:36:22.800883 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-fx2pb_29eb2b86-6594-44a0-a146-073da23a9341/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 07:36:22 crc kubenswrapper[4632]: I1201 07:36:22.823453 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-kxzwd_c57a7c91-453f-4fce-8410-abffb3ffe651/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 07:36:22 crc kubenswrapper[4632]: I1201 07:36:22.976713 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-n5wcr_3debc8c5-6f78-44c9-9f2d-4207eeec3b11/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 07:36:23 crc kubenswrapper[4632]: I1201 07:36:23.014660 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-bqvjq_6e818cb4-7d89-4c61-8bd4-8b15b748ed38/ssh-known-hosts-edpm-deployment/0.log" Dec 01 07:36:23 crc kubenswrapper[4632]: I1201 07:36:23.247588 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-7697b7f499-t6njt_e090ffa7-e9ce-46e9-97e8-8e38155d9241/proxy-server/0.log" Dec 01 07:36:23 crc kubenswrapper[4632]: I1201 07:36:23.310606 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-7697b7f499-t6njt_e090ffa7-e9ce-46e9-97e8-8e38155d9241/proxy-httpd/0.log" Dec 01 07:36:23 crc kubenswrapper[4632]: I1201 07:36:23.405648 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-j5jrs_6efb3189-8101-4364-93b9-d31c87b9fe71/swift-ring-rebalance/0.log" Dec 01 07:36:23 crc kubenswrapper[4632]: I1201 07:36:23.490817 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/account-auditor/0.log" Dec 01 07:36:23 crc kubenswrapper[4632]: I1201 07:36:23.492215 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/account-reaper/0.log" Dec 01 07:36:23 crc kubenswrapper[4632]: I1201 07:36:23.622467 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/account-replicator/0.log" Dec 01 07:36:23 crc kubenswrapper[4632]: I1201 07:36:23.636048 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/account-server/0.log" Dec 01 07:36:23 crc kubenswrapper[4632]: I1201 07:36:23.663619 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/container-auditor/0.log" Dec 01 07:36:23 crc kubenswrapper[4632]: I1201 07:36:23.713041 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/container-replicator/0.log" Dec 01 07:36:23 crc kubenswrapper[4632]: I1201 07:36:23.749767 4632 scope.go:117] "RemoveContainer" containerID="99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689" Dec 01 07:36:23 crc kubenswrapper[4632]: I1201 07:36:23.832696 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/container-server/0.log" Dec 01 07:36:23 crc kubenswrapper[4632]: I1201 07:36:23.844943 4632 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/object-auditor/0.log" Dec 01 07:36:23 crc kubenswrapper[4632]: I1201 07:36:23.868875 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/container-updater/0.log" Dec 01 07:36:23 crc kubenswrapper[4632]: I1201 07:36:23.937863 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerStarted","Data":"6bd77611f8ce655c502bf7a7d55064856d8c28e6ff4f924c8c321dab8d172bd2"} Dec 01 07:36:23 crc kubenswrapper[4632]: I1201 07:36:23.979216 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/object-expirer/0.log" Dec 01 07:36:24 crc kubenswrapper[4632]: I1201 07:36:24.039999 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/object-server/0.log" Dec 01 07:36:24 crc kubenswrapper[4632]: I1201 07:36:24.074980 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/object-replicator/0.log" Dec 01 07:36:24 crc kubenswrapper[4632]: I1201 07:36:24.147812 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/object-updater/0.log" Dec 01 07:36:24 crc kubenswrapper[4632]: I1201 07:36:24.159584 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/rsync/0.log" Dec 01 07:36:24 crc kubenswrapper[4632]: I1201 07:36:24.265488 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_b136b809-94ef-4a5e-86b6-d3652e7ce987/swift-recon-cron/0.log" Dec 01 07:36:24 crc kubenswrapper[4632]: I1201 07:36:24.396606 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-42lvs_eb82c6b3-a652-4d30-a8c9-63f6878557cc/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 07:36:24 crc kubenswrapper[4632]: I1201 07:36:24.430409 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_713aa788-d673-4113-93f4-760c3d3714cc/tempest-tests-tempest-tests-runner/0.log" Dec 01 07:36:24 crc kubenswrapper[4632]: I1201 07:36:24.594674 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_b7e83dda-e4f1-4a62-b293-730dcafe5d39/test-operator-logs-container/0.log" Dec 01 07:36:24 crc kubenswrapper[4632]: I1201 07:36:24.611937 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-znccl_af7575c1-47ca-4c63-bdca-1a42d23485ee/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 01 07:36:34 crc kubenswrapper[4632]: I1201 07:36:34.909858 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_df701889-0ecf-4452-8689-40cc4c4de347/memcached/0.log" Dec 01 07:36:46 crc kubenswrapper[4632]: I1201 07:36:46.390677 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn_92427059-853f-4a13-8994-5cdf75d48aec/util/0.log" Dec 01 07:36:46 crc kubenswrapper[4632]: I1201 07:36:46.543774 4632 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn_92427059-853f-4a13-8994-5cdf75d48aec/util/0.log" Dec 01 07:36:46 crc kubenswrapper[4632]: I1201 07:36:46.558288 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn_92427059-853f-4a13-8994-5cdf75d48aec/pull/0.log" Dec 01 07:36:46 crc kubenswrapper[4632]: I1201 07:36:46.580782 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn_92427059-853f-4a13-8994-5cdf75d48aec/pull/0.log" Dec 01 07:36:46 crc kubenswrapper[4632]: I1201 07:36:46.734119 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn_92427059-853f-4a13-8994-5cdf75d48aec/pull/0.log" Dec 01 07:36:46 crc kubenswrapper[4632]: I1201 07:36:46.739380 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn_92427059-853f-4a13-8994-5cdf75d48aec/util/0.log" Dec 01 07:36:46 crc kubenswrapper[4632]: I1201 07:36:46.776650 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_34453e0ae93d07abc4f6e497f8998de77c1bdd8f20510be6b58912cf3bhchnn_92427059-853f-4a13-8994-5cdf75d48aec/extract/0.log" Dec 01 07:36:46 crc kubenswrapper[4632]: I1201 07:36:46.920674 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-wpwjc_3ad25430-83fc-45b0-83b1-adbe4e729508/kube-rbac-proxy/0.log" Dec 01 07:36:46 crc kubenswrapper[4632]: I1201 07:36:46.934985 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-wpwjc_3ad25430-83fc-45b0-83b1-adbe4e729508/manager/0.log" Dec 01 07:36:46 crc kubenswrapper[4632]: I1201 07:36:46.961437 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-4lrqg_fb574298-9e57-474c-9f80-faa7be6cded8/kube-rbac-proxy/0.log" Dec 01 07:36:47 crc kubenswrapper[4632]: I1201 07:36:47.111038 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-fxrdf_c5aba6fe-c38f-45ec-8057-a19b2636fe68/kube-rbac-proxy/0.log" Dec 01 07:36:47 crc kubenswrapper[4632]: I1201 07:36:47.115307 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-4lrqg_fb574298-9e57-474c-9f80-faa7be6cded8/manager/0.log" Dec 01 07:36:47 crc kubenswrapper[4632]: I1201 07:36:47.279885 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-fxrdf_c5aba6fe-c38f-45ec-8057-a19b2636fe68/manager/0.log" Dec 01 07:36:47 crc kubenswrapper[4632]: I1201 07:36:47.381631 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-668d9c48b9-k89zc_0bb7b633-65c0-4c4e-9fad-648fd779ff4a/kube-rbac-proxy/0.log" Dec 01 07:36:47 crc kubenswrapper[4632]: I1201 07:36:47.503458 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-668d9c48b9-k89zc_0bb7b633-65c0-4c4e-9fad-648fd779ff4a/manager/0.log" Dec 01 07:36:47 crc 
kubenswrapper[4632]: I1201 07:36:47.543194 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-xjrkn_749f148d-477b-4186-8c5a-ea9f86e4a64b/kube-rbac-proxy/0.log" Dec 01 07:36:47 crc kubenswrapper[4632]: I1201 07:36:47.585079 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-xjrkn_749f148d-477b-4186-8c5a-ea9f86e4a64b/manager/0.log" Dec 01 07:36:47 crc kubenswrapper[4632]: I1201 07:36:47.693086 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-knsc7_c470b4eb-c3ca-4117-89ec-5812e4cbcec1/kube-rbac-proxy/0.log" Dec 01 07:36:47 crc kubenswrapper[4632]: I1201 07:36:47.718565 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-knsc7_c470b4eb-c3ca-4117-89ec-5812e4cbcec1/manager/0.log" Dec 01 07:36:47 crc kubenswrapper[4632]: I1201 07:36:47.841603 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-bbrm4_731aedb5-2e95-4d08-9a4e-6c27e64d5ea7/kube-rbac-proxy/0.log" Dec 01 07:36:47 crc kubenswrapper[4632]: I1201 07:36:47.910634 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-hwjzz_ad78f229-4425-4bc0-9721-fcf6c2a067d7/kube-rbac-proxy/0.log" Dec 01 07:36:47 crc kubenswrapper[4632]: I1201 07:36:47.987829 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-bbrm4_731aedb5-2e95-4d08-9a4e-6c27e64d5ea7/manager/0.log" Dec 01 07:36:48 crc kubenswrapper[4632]: I1201 07:36:48.037072 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-hwjzz_ad78f229-4425-4bc0-9721-fcf6c2a067d7/manager/0.log" Dec 01 07:36:48 crc kubenswrapper[4632]: I1201 07:36:48.092464 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-546d4bdf48-7ct9v_f191cde7-969a-4111-86cf-855623533060/kube-rbac-proxy/0.log" Dec 01 07:36:48 crc kubenswrapper[4632]: I1201 07:36:48.194408 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-546d4bdf48-7ct9v_f191cde7-969a-4111-86cf-855623533060/manager/0.log" Dec 01 07:36:48 crc kubenswrapper[4632]: I1201 07:36:48.271719 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6546668bfd-fcwbl_768788a8-025e-4e79-a0ec-6bb23a14f72e/manager/0.log" Dec 01 07:36:48 crc kubenswrapper[4632]: I1201 07:36:48.274198 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6546668bfd-fcwbl_768788a8-025e-4e79-a0ec-6bb23a14f72e/kube-rbac-proxy/0.log" Dec 01 07:36:48 crc kubenswrapper[4632]: I1201 07:36:48.380460 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-r9gx9_35336d69-2a15-4513-970c-19e86cbb339f/kube-rbac-proxy/0.log" Dec 01 07:36:48 crc kubenswrapper[4632]: I1201 07:36:48.442374 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-r9gx9_35336d69-2a15-4513-970c-19e86cbb339f/manager/0.log" 
Dec 01 07:36:48 crc kubenswrapper[4632]: I1201 07:36:48.521884 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-wnk9x_254c38bb-3a55-426d-a497-69b3aa16c639/kube-rbac-proxy/0.log" Dec 01 07:36:48 crc kubenswrapper[4632]: I1201 07:36:48.579413 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-wnk9x_254c38bb-3a55-426d-a497-69b3aa16c639/manager/0.log" Dec 01 07:36:48 crc kubenswrapper[4632]: I1201 07:36:48.682577 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-pdsxw_c1fe7e47-1a47-4f9b-b0a3-cbb08a5cce73/kube-rbac-proxy/0.log" Dec 01 07:36:48 crc kubenswrapper[4632]: I1201 07:36:48.773366 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-pdsxw_c1fe7e47-1a47-4f9b-b0a3-cbb08a5cce73/manager/0.log" Dec 01 07:36:48 crc kubenswrapper[4632]: I1201 07:36:48.815428 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-g2j5h_0d6924f1-38a5-434e-99b6-9f9a06ae0894/kube-rbac-proxy/0.log" Dec 01 07:36:48 crc kubenswrapper[4632]: I1201 07:36:48.860592 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-g2j5h_0d6924f1-38a5-434e-99b6-9f9a06ae0894/manager/0.log" Dec 01 07:36:48 crc kubenswrapper[4632]: I1201 07:36:48.968081 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6698bcb446x48x8_05397964-6686-490a-ab73-ec535a262794/kube-rbac-proxy/0.log" Dec 01 07:36:49 crc kubenswrapper[4632]: I1201 07:36:49.053127 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6698bcb446x48x8_05397964-6686-490a-ab73-ec535a262794/manager/0.log" Dec 01 07:36:49 crc kubenswrapper[4632]: I1201 07:36:49.616952 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6ddddd9d6f-p8vpd_fc9f0281-8d41-469a-b0f1-2b9f20245a43/operator/0.log" Dec 01 07:36:49 crc kubenswrapper[4632]: I1201 07:36:49.702230 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-x4g48_33f69e4f-eb93-4113-80d0-b50fdc5a83f7/registry-server/0.log" Dec 01 07:36:49 crc kubenswrapper[4632]: I1201 07:36:49.710231 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-7v8l7_ecb68de8-b267-4c69-baf4-078e3feacf8e/kube-rbac-proxy/0.log" Dec 01 07:36:49 crc kubenswrapper[4632]: I1201 07:36:49.887109 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-x2k78_a9952ac0-b2d4-4717-823b-5f9f0338fb5f/kube-rbac-proxy/0.log" Dec 01 07:36:49 crc kubenswrapper[4632]: I1201 07:36:49.908557 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-7v8l7_ecb68de8-b267-4c69-baf4-078e3feacf8e/manager/0.log" Dec 01 07:36:49 crc kubenswrapper[4632]: I1201 07:36:49.937556 4632 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-x2k78_a9952ac0-b2d4-4717-823b-5f9f0338fb5f/manager/0.log" Dec 01 07:36:50 crc kubenswrapper[4632]: I1201 07:36:50.129155 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-7jjwb_ce060aca-e2c3-4454-b126-719a572ece48/operator/0.log" Dec 01 07:36:50 crc kubenswrapper[4632]: I1201 07:36:50.240615 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-674sk_9744f748-86b6-417c-ab38-18cc3ad9b89a/kube-rbac-proxy/0.log" Dec 01 07:36:50 crc kubenswrapper[4632]: I1201 07:36:50.356401 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-656fd97d56-dfqrf_0b8afb8f-7d54-43b7-80ec-ff5e2ee806e9/manager/0.log" Dec 01 07:36:50 crc kubenswrapper[4632]: I1201 07:36:50.377264 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-674sk_9744f748-86b6-417c-ab38-18cc3ad9b89a/manager/0.log" Dec 01 07:36:50 crc kubenswrapper[4632]: I1201 07:36:50.456748 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-x2252_697c8fad-c587-41ce-ae4a-158bb22b6394/kube-rbac-proxy/0.log" Dec 01 07:36:50 crc kubenswrapper[4632]: I1201 07:36:50.487750 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-x2252_697c8fad-c587-41ce-ae4a-158bb22b6394/manager/0.log" Dec 01 07:36:50 crc kubenswrapper[4632]: I1201 07:36:50.580257 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-4hqhb_5640bad0-ba52-4bc4-845d-d47987318155/manager/0.log" Dec 01 07:36:50 crc kubenswrapper[4632]: I1201 07:36:50.586022 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-4hqhb_5640bad0-ba52-4bc4-845d-d47987318155/kube-rbac-proxy/0.log" Dec 01 07:36:50 crc kubenswrapper[4632]: I1201 07:36:50.667823 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-zkfsl_756b1531-b2e5-4a10-aad8-ae2378b09a68/kube-rbac-proxy/0.log" Dec 01 07:36:50 crc kubenswrapper[4632]: I1201 07:36:50.724977 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-zkfsl_756b1531-b2e5-4a10-aad8-ae2378b09a68/manager/0.log" Dec 01 07:36:55 crc kubenswrapper[4632]: I1201 07:36:55.740628 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-d42lb"] Dec 01 07:36:55 crc kubenswrapper[4632]: E1201 07:36:55.741324 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91cadb4a-922a-4402-b7f2-30897f6fa8fe" containerName="container-00" Dec 01 07:36:55 crc kubenswrapper[4632]: I1201 07:36:55.741340 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="91cadb4a-922a-4402-b7f2-30897f6fa8fe" containerName="container-00" Dec 01 07:36:55 crc kubenswrapper[4632]: I1201 07:36:55.741554 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="91cadb4a-922a-4402-b7f2-30897f6fa8fe" containerName="container-00" Dec 01 07:36:55 crc kubenswrapper[4632]: I1201 07:36:55.742922 4632 util.go:30] "No sandbox for pod 
can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-d42lb" Dec 01 07:36:55 crc kubenswrapper[4632]: I1201 07:36:55.761432 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-d42lb"] Dec 01 07:36:55 crc kubenswrapper[4632]: I1201 07:36:55.791429 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77414e08-7005-4510-8c7b-e17c36f81cb7-utilities\") pod \"certified-operators-d42lb\" (UID: \"77414e08-7005-4510-8c7b-e17c36f81cb7\") " pod="openshift-marketplace/certified-operators-d42lb" Dec 01 07:36:55 crc kubenswrapper[4632]: I1201 07:36:55.792580 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7hr2\" (UniqueName: \"kubernetes.io/projected/77414e08-7005-4510-8c7b-e17c36f81cb7-kube-api-access-m7hr2\") pod \"certified-operators-d42lb\" (UID: \"77414e08-7005-4510-8c7b-e17c36f81cb7\") " pod="openshift-marketplace/certified-operators-d42lb" Dec 01 07:36:55 crc kubenswrapper[4632]: I1201 07:36:55.792983 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77414e08-7005-4510-8c7b-e17c36f81cb7-catalog-content\") pod \"certified-operators-d42lb\" (UID: \"77414e08-7005-4510-8c7b-e17c36f81cb7\") " pod="openshift-marketplace/certified-operators-d42lb" Dec 01 07:36:55 crc kubenswrapper[4632]: I1201 07:36:55.895165 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77414e08-7005-4510-8c7b-e17c36f81cb7-utilities\") pod \"certified-operators-d42lb\" (UID: \"77414e08-7005-4510-8c7b-e17c36f81cb7\") " pod="openshift-marketplace/certified-operators-d42lb" Dec 01 07:36:55 crc kubenswrapper[4632]: I1201 07:36:55.895289 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7hr2\" (UniqueName: \"kubernetes.io/projected/77414e08-7005-4510-8c7b-e17c36f81cb7-kube-api-access-m7hr2\") pod \"certified-operators-d42lb\" (UID: \"77414e08-7005-4510-8c7b-e17c36f81cb7\") " pod="openshift-marketplace/certified-operators-d42lb" Dec 01 07:36:55 crc kubenswrapper[4632]: I1201 07:36:55.895403 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77414e08-7005-4510-8c7b-e17c36f81cb7-catalog-content\") pod \"certified-operators-d42lb\" (UID: \"77414e08-7005-4510-8c7b-e17c36f81cb7\") " pod="openshift-marketplace/certified-operators-d42lb" Dec 01 07:36:55 crc kubenswrapper[4632]: I1201 07:36:55.895734 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77414e08-7005-4510-8c7b-e17c36f81cb7-utilities\") pod \"certified-operators-d42lb\" (UID: \"77414e08-7005-4510-8c7b-e17c36f81cb7\") " pod="openshift-marketplace/certified-operators-d42lb" Dec 01 07:36:55 crc kubenswrapper[4632]: I1201 07:36:55.895824 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77414e08-7005-4510-8c7b-e17c36f81cb7-catalog-content\") pod \"certified-operators-d42lb\" (UID: \"77414e08-7005-4510-8c7b-e17c36f81cb7\") " pod="openshift-marketplace/certified-operators-d42lb" Dec 01 07:36:55 crc kubenswrapper[4632]: I1201 07:36:55.914526 4632 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7hr2\" (UniqueName: \"kubernetes.io/projected/77414e08-7005-4510-8c7b-e17c36f81cb7-kube-api-access-m7hr2\") pod \"certified-operators-d42lb\" (UID: \"77414e08-7005-4510-8c7b-e17c36f81cb7\") " pod="openshift-marketplace/certified-operators-d42lb" Dec 01 07:36:56 crc kubenswrapper[4632]: I1201 07:36:56.061681 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-d42lb" Dec 01 07:36:56 crc kubenswrapper[4632]: I1201 07:36:56.539001 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-d42lb"] Dec 01 07:36:57 crc kubenswrapper[4632]: I1201 07:36:57.238661 4632 generic.go:334] "Generic (PLEG): container finished" podID="77414e08-7005-4510-8c7b-e17c36f81cb7" containerID="b539a3459e8a04e0d83725c76652f9deef12b8bb9b88947c3baa08ab1c7bfb8f" exitCode=0 Dec 01 07:36:57 crc kubenswrapper[4632]: I1201 07:36:57.238721 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d42lb" event={"ID":"77414e08-7005-4510-8c7b-e17c36f81cb7","Type":"ContainerDied","Data":"b539a3459e8a04e0d83725c76652f9deef12b8bb9b88947c3baa08ab1c7bfb8f"} Dec 01 07:36:57 crc kubenswrapper[4632]: I1201 07:36:57.238985 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d42lb" event={"ID":"77414e08-7005-4510-8c7b-e17c36f81cb7","Type":"ContainerStarted","Data":"4511dc4e43678595f86cbcb300a8065e0f6fb994640306cff1f4048a71e1a97d"} Dec 01 07:36:57 crc kubenswrapper[4632]: I1201 07:36:57.240928 4632 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 07:36:58 crc kubenswrapper[4632]: I1201 07:36:58.248905 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d42lb" event={"ID":"77414e08-7005-4510-8c7b-e17c36f81cb7","Type":"ContainerStarted","Data":"3f6f5e9eb2f1b738f7fb80838efe462558e7c69a8e2d8aea7879b19d4b977bd0"} Dec 01 07:36:59 crc kubenswrapper[4632]: I1201 07:36:59.258080 4632 generic.go:334] "Generic (PLEG): container finished" podID="77414e08-7005-4510-8c7b-e17c36f81cb7" containerID="3f6f5e9eb2f1b738f7fb80838efe462558e7c69a8e2d8aea7879b19d4b977bd0" exitCode=0 Dec 01 07:36:59 crc kubenswrapper[4632]: I1201 07:36:59.258167 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d42lb" event={"ID":"77414e08-7005-4510-8c7b-e17c36f81cb7","Type":"ContainerDied","Data":"3f6f5e9eb2f1b738f7fb80838efe462558e7c69a8e2d8aea7879b19d4b977bd0"} Dec 01 07:37:00 crc kubenswrapper[4632]: I1201 07:37:00.275067 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d42lb" event={"ID":"77414e08-7005-4510-8c7b-e17c36f81cb7","Type":"ContainerStarted","Data":"5ac1a285371ba1373568a2a7a4d6977cc5fff6417daedab37686b5ad70bf5103"} Dec 01 07:37:00 crc kubenswrapper[4632]: I1201 07:37:00.300177 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-d42lb" podStartSLOduration=2.751296836 podStartE2EDuration="5.300154918s" podCreationTimestamp="2025-12-01 07:36:55 +0000 UTC" firstStartedPulling="2025-12-01 07:36:57.240665653 +0000 UTC m=+3226.805678626" lastFinishedPulling="2025-12-01 07:36:59.789523736 +0000 UTC m=+3229.354536708" observedRunningTime="2025-12-01 07:37:00.293004903 +0000 UTC m=+3229.858017876" 
watchObservedRunningTime="2025-12-01 07:37:00.300154918 +0000 UTC m=+3229.865167891" Dec 01 07:37:06 crc kubenswrapper[4632]: I1201 07:37:06.062386 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-d42lb" Dec 01 07:37:06 crc kubenswrapper[4632]: I1201 07:37:06.062948 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-d42lb" Dec 01 07:37:06 crc kubenswrapper[4632]: I1201 07:37:06.097766 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-d42lb" Dec 01 07:37:06 crc kubenswrapper[4632]: I1201 07:37:06.369059 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-d42lb" Dec 01 07:37:07 crc kubenswrapper[4632]: I1201 07:37:07.045804 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-mxcl4_e354731f-7ed1-47e3-8d64-7d55f1613100/control-plane-machine-set-operator/0.log" Dec 01 07:37:07 crc kubenswrapper[4632]: I1201 07:37:07.155104 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-g99wc_e90e3510-a941-487a-af96-639fdc977fbb/kube-rbac-proxy/0.log" Dec 01 07:37:07 crc kubenswrapper[4632]: I1201 07:37:07.176701 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-g99wc_e90e3510-a941-487a-af96-639fdc977fbb/machine-api-operator/0.log" Dec 01 07:37:08 crc kubenswrapper[4632]: I1201 07:37:08.134048 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-d42lb"] Dec 01 07:37:08 crc kubenswrapper[4632]: I1201 07:37:08.351529 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-d42lb" podUID="77414e08-7005-4510-8c7b-e17c36f81cb7" containerName="registry-server" containerID="cri-o://5ac1a285371ba1373568a2a7a4d6977cc5fff6417daedab37686b5ad70bf5103" gracePeriod=2 Dec 01 07:37:08 crc kubenswrapper[4632]: I1201 07:37:08.749275 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-d42lb" Dec 01 07:37:08 crc kubenswrapper[4632]: I1201 07:37:08.894993 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77414e08-7005-4510-8c7b-e17c36f81cb7-catalog-content\") pod \"77414e08-7005-4510-8c7b-e17c36f81cb7\" (UID: \"77414e08-7005-4510-8c7b-e17c36f81cb7\") " Dec 01 07:37:08 crc kubenswrapper[4632]: I1201 07:37:08.895250 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77414e08-7005-4510-8c7b-e17c36f81cb7-utilities\") pod \"77414e08-7005-4510-8c7b-e17c36f81cb7\" (UID: \"77414e08-7005-4510-8c7b-e17c36f81cb7\") " Dec 01 07:37:08 crc kubenswrapper[4632]: I1201 07:37:08.895309 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m7hr2\" (UniqueName: \"kubernetes.io/projected/77414e08-7005-4510-8c7b-e17c36f81cb7-kube-api-access-m7hr2\") pod \"77414e08-7005-4510-8c7b-e17c36f81cb7\" (UID: \"77414e08-7005-4510-8c7b-e17c36f81cb7\") " Dec 01 07:37:08 crc kubenswrapper[4632]: I1201 07:37:08.895890 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77414e08-7005-4510-8c7b-e17c36f81cb7-utilities" (OuterVolumeSpecName: "utilities") pod "77414e08-7005-4510-8c7b-e17c36f81cb7" (UID: "77414e08-7005-4510-8c7b-e17c36f81cb7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:37:08 crc kubenswrapper[4632]: I1201 07:37:08.896471 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77414e08-7005-4510-8c7b-e17c36f81cb7-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:37:08 crc kubenswrapper[4632]: I1201 07:37:08.900538 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77414e08-7005-4510-8c7b-e17c36f81cb7-kube-api-access-m7hr2" (OuterVolumeSpecName: "kube-api-access-m7hr2") pod "77414e08-7005-4510-8c7b-e17c36f81cb7" (UID: "77414e08-7005-4510-8c7b-e17c36f81cb7"). InnerVolumeSpecName "kube-api-access-m7hr2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:37:08 crc kubenswrapper[4632]: I1201 07:37:08.932036 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77414e08-7005-4510-8c7b-e17c36f81cb7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "77414e08-7005-4510-8c7b-e17c36f81cb7" (UID: "77414e08-7005-4510-8c7b-e17c36f81cb7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:37:09 crc kubenswrapper[4632]: I1201 07:37:08.999735 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77414e08-7005-4510-8c7b-e17c36f81cb7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:37:09 crc kubenswrapper[4632]: I1201 07:37:09.000133 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m7hr2\" (UniqueName: \"kubernetes.io/projected/77414e08-7005-4510-8c7b-e17c36f81cb7-kube-api-access-m7hr2\") on node \"crc\" DevicePath \"\"" Dec 01 07:37:09 crc kubenswrapper[4632]: I1201 07:37:09.360917 4632 generic.go:334] "Generic (PLEG): container finished" podID="77414e08-7005-4510-8c7b-e17c36f81cb7" containerID="5ac1a285371ba1373568a2a7a4d6977cc5fff6417daedab37686b5ad70bf5103" exitCode=0 Dec 01 07:37:09 crc kubenswrapper[4632]: I1201 07:37:09.360980 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d42lb" event={"ID":"77414e08-7005-4510-8c7b-e17c36f81cb7","Type":"ContainerDied","Data":"5ac1a285371ba1373568a2a7a4d6977cc5fff6417daedab37686b5ad70bf5103"} Dec 01 07:37:09 crc kubenswrapper[4632]: I1201 07:37:09.361014 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-d42lb" event={"ID":"77414e08-7005-4510-8c7b-e17c36f81cb7","Type":"ContainerDied","Data":"4511dc4e43678595f86cbcb300a8065e0f6fb994640306cff1f4048a71e1a97d"} Dec 01 07:37:09 crc kubenswrapper[4632]: I1201 07:37:09.361050 4632 scope.go:117] "RemoveContainer" containerID="5ac1a285371ba1373568a2a7a4d6977cc5fff6417daedab37686b5ad70bf5103" Dec 01 07:37:09 crc kubenswrapper[4632]: I1201 07:37:09.361223 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-d42lb" Dec 01 07:37:09 crc kubenswrapper[4632]: I1201 07:37:09.386453 4632 scope.go:117] "RemoveContainer" containerID="3f6f5e9eb2f1b738f7fb80838efe462558e7c69a8e2d8aea7879b19d4b977bd0" Dec 01 07:37:09 crc kubenswrapper[4632]: I1201 07:37:09.401512 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-d42lb"] Dec 01 07:37:09 crc kubenswrapper[4632]: I1201 07:37:09.408526 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-d42lb"] Dec 01 07:37:09 crc kubenswrapper[4632]: I1201 07:37:09.421899 4632 scope.go:117] "RemoveContainer" containerID="b539a3459e8a04e0d83725c76652f9deef12b8bb9b88947c3baa08ab1c7bfb8f" Dec 01 07:37:09 crc kubenswrapper[4632]: I1201 07:37:09.445645 4632 scope.go:117] "RemoveContainer" containerID="5ac1a285371ba1373568a2a7a4d6977cc5fff6417daedab37686b5ad70bf5103" Dec 01 07:37:09 crc kubenswrapper[4632]: E1201 07:37:09.446205 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ac1a285371ba1373568a2a7a4d6977cc5fff6417daedab37686b5ad70bf5103\": container with ID starting with 5ac1a285371ba1373568a2a7a4d6977cc5fff6417daedab37686b5ad70bf5103 not found: ID does not exist" containerID="5ac1a285371ba1373568a2a7a4d6977cc5fff6417daedab37686b5ad70bf5103" Dec 01 07:37:09 crc kubenswrapper[4632]: I1201 07:37:09.446253 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ac1a285371ba1373568a2a7a4d6977cc5fff6417daedab37686b5ad70bf5103"} err="failed to get container status \"5ac1a285371ba1373568a2a7a4d6977cc5fff6417daedab37686b5ad70bf5103\": rpc error: code = NotFound desc = could not find container \"5ac1a285371ba1373568a2a7a4d6977cc5fff6417daedab37686b5ad70bf5103\": container with ID starting with 5ac1a285371ba1373568a2a7a4d6977cc5fff6417daedab37686b5ad70bf5103 not found: ID does not exist" Dec 01 07:37:09 crc kubenswrapper[4632]: I1201 07:37:09.446284 4632 scope.go:117] "RemoveContainer" containerID="3f6f5e9eb2f1b738f7fb80838efe462558e7c69a8e2d8aea7879b19d4b977bd0" Dec 01 07:37:09 crc kubenswrapper[4632]: E1201 07:37:09.446647 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f6f5e9eb2f1b738f7fb80838efe462558e7c69a8e2d8aea7879b19d4b977bd0\": container with ID starting with 3f6f5e9eb2f1b738f7fb80838efe462558e7c69a8e2d8aea7879b19d4b977bd0 not found: ID does not exist" containerID="3f6f5e9eb2f1b738f7fb80838efe462558e7c69a8e2d8aea7879b19d4b977bd0" Dec 01 07:37:09 crc kubenswrapper[4632]: I1201 07:37:09.446751 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f6f5e9eb2f1b738f7fb80838efe462558e7c69a8e2d8aea7879b19d4b977bd0"} err="failed to get container status \"3f6f5e9eb2f1b738f7fb80838efe462558e7c69a8e2d8aea7879b19d4b977bd0\": rpc error: code = NotFound desc = could not find container \"3f6f5e9eb2f1b738f7fb80838efe462558e7c69a8e2d8aea7879b19d4b977bd0\": container with ID starting with 3f6f5e9eb2f1b738f7fb80838efe462558e7c69a8e2d8aea7879b19d4b977bd0 not found: ID does not exist" Dec 01 07:37:09 crc kubenswrapper[4632]: I1201 07:37:09.446825 4632 scope.go:117] "RemoveContainer" containerID="b539a3459e8a04e0d83725c76652f9deef12b8bb9b88947c3baa08ab1c7bfb8f" Dec 01 07:37:09 crc kubenswrapper[4632]: E1201 07:37:09.447161 4632 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"b539a3459e8a04e0d83725c76652f9deef12b8bb9b88947c3baa08ab1c7bfb8f\": container with ID starting with b539a3459e8a04e0d83725c76652f9deef12b8bb9b88947c3baa08ab1c7bfb8f not found: ID does not exist" containerID="b539a3459e8a04e0d83725c76652f9deef12b8bb9b88947c3baa08ab1c7bfb8f" Dec 01 07:37:09 crc kubenswrapper[4632]: I1201 07:37:09.447185 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b539a3459e8a04e0d83725c76652f9deef12b8bb9b88947c3baa08ab1c7bfb8f"} err="failed to get container status \"b539a3459e8a04e0d83725c76652f9deef12b8bb9b88947c3baa08ab1c7bfb8f\": rpc error: code = NotFound desc = could not find container \"b539a3459e8a04e0d83725c76652f9deef12b8bb9b88947c3baa08ab1c7bfb8f\": container with ID starting with b539a3459e8a04e0d83725c76652f9deef12b8bb9b88947c3baa08ab1c7bfb8f not found: ID does not exist" Dec 01 07:37:10 crc kubenswrapper[4632]: I1201 07:37:10.763013 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77414e08-7005-4510-8c7b-e17c36f81cb7" path="/var/lib/kubelet/pods/77414e08-7005-4510-8c7b-e17c36f81cb7/volumes" Dec 01 07:37:17 crc kubenswrapper[4632]: I1201 07:37:17.393872 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-dntsb_15ee763c-142f-480b-92b5-6360ed211e21/cert-manager-controller/0.log" Dec 01 07:37:17 crc kubenswrapper[4632]: I1201 07:37:17.565594 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-jmgck_91c9fab4-eb3d-4b68-bf2b-31d5f17c6c0a/cert-manager-cainjector/0.log" Dec 01 07:37:17 crc kubenswrapper[4632]: I1201 07:37:17.571046 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-wjfb2_05af64fb-260a-44bb-a7e8-e3b8ffbee656/cert-manager-webhook/0.log" Dec 01 07:37:27 crc kubenswrapper[4632]: I1201 07:37:27.008444 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-m9dcd_6d66d298-9ddf-440c-ace2-14c38dc309b0/nmstate-console-plugin/0.log" Dec 01 07:37:27 crc kubenswrapper[4632]: I1201 07:37:27.155734 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-qjq94_f517f423-af69-4a25-a169-e71268fa0ca3/nmstate-handler/0.log" Dec 01 07:37:27 crc kubenswrapper[4632]: I1201 07:37:27.179755 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-ngl6v_3b03b7ed-47ab-4ae0-95e7-ed1c830fe065/kube-rbac-proxy/0.log" Dec 01 07:37:27 crc kubenswrapper[4632]: I1201 07:37:27.206611 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-ngl6v_3b03b7ed-47ab-4ae0-95e7-ed1c830fe065/nmstate-metrics/0.log" Dec 01 07:37:27 crc kubenswrapper[4632]: I1201 07:37:27.306838 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-w7h9m_7fcf49f3-19eb-41a7-b095-ceea8b76f9bd/nmstate-operator/0.log" Dec 01 07:37:27 crc kubenswrapper[4632]: I1201 07:37:27.402164 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-hmmp7_70be3201-d4ba-4c07-950e-527ad7d2024d/nmstate-webhook/0.log" Dec 01 07:37:39 crc kubenswrapper[4632]: I1201 07:37:39.071124 4632 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_controller-f8648f98b-knsfp_e2dcb1c6-c676-46e9-85b4-c0a3deb9e2fe/kube-rbac-proxy/0.log" Dec 01 07:37:39 crc kubenswrapper[4632]: I1201 07:37:39.129617 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-knsfp_e2dcb1c6-c676-46e9-85b4-c0a3deb9e2fe/controller/0.log" Dec 01 07:37:39 crc kubenswrapper[4632]: I1201 07:37:39.135932 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/cp-frr-files/0.log" Dec 01 07:37:39 crc kubenswrapper[4632]: I1201 07:37:39.351571 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/cp-reloader/0.log" Dec 01 07:37:39 crc kubenswrapper[4632]: I1201 07:37:39.352084 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/cp-reloader/0.log" Dec 01 07:37:39 crc kubenswrapper[4632]: I1201 07:37:39.352223 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/cp-frr-files/0.log" Dec 01 07:37:39 crc kubenswrapper[4632]: I1201 07:37:39.358775 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/cp-metrics/0.log" Dec 01 07:37:39 crc kubenswrapper[4632]: I1201 07:37:39.624038 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/cp-frr-files/0.log" Dec 01 07:37:39 crc kubenswrapper[4632]: I1201 07:37:39.650405 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/cp-reloader/0.log" Dec 01 07:37:39 crc kubenswrapper[4632]: I1201 07:37:39.678902 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/cp-metrics/0.log" Dec 01 07:37:39 crc kubenswrapper[4632]: I1201 07:37:39.682681 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/cp-metrics/0.log" Dec 01 07:37:39 crc kubenswrapper[4632]: I1201 07:37:39.796007 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/cp-frr-files/0.log" Dec 01 07:37:39 crc kubenswrapper[4632]: I1201 07:37:39.825945 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/cp-metrics/0.log" Dec 01 07:37:39 crc kubenswrapper[4632]: I1201 07:37:39.830703 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/cp-reloader/0.log" Dec 01 07:37:39 crc kubenswrapper[4632]: I1201 07:37:39.858417 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/controller/0.log" Dec 01 07:37:40 crc kubenswrapper[4632]: I1201 07:37:40.003819 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/kube-rbac-proxy/0.log" Dec 01 07:37:40 crc kubenswrapper[4632]: I1201 07:37:40.006881 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/frr-metrics/0.log" Dec 01 07:37:40 crc 
kubenswrapper[4632]: I1201 07:37:40.028059 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/kube-rbac-proxy-frr/0.log" Dec 01 07:37:40 crc kubenswrapper[4632]: I1201 07:37:40.168926 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/reloader/0.log" Dec 01 07:37:40 crc kubenswrapper[4632]: I1201 07:37:40.205856 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-z9drr_1da19bfe-a759-44f5-9839-b638c45f84b8/frr-k8s-webhook-server/0.log" Dec 01 07:37:40 crc kubenswrapper[4632]: I1201 07:37:40.358271 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-5cdb7cf54d-h7wfl_4cceec72-bb6e-43a4-8b98-8077e45f281c/manager/0.log" Dec 01 07:37:40 crc kubenswrapper[4632]: I1201 07:37:40.539218 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-696b4c485-xvdlk_c28a59da-1614-46fc-9ece-a4c888e9c53c/webhook-server/0.log" Dec 01 07:37:40 crc kubenswrapper[4632]: I1201 07:37:40.606016 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-rld7v_1f8d778c-51f1-4fed-8f3a-34bd3f603d06/kube-rbac-proxy/0.log" Dec 01 07:37:41 crc kubenswrapper[4632]: I1201 07:37:41.082631 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-rld7v_1f8d778c-51f1-4fed-8f3a-34bd3f603d06/speaker/0.log" Dec 01 07:37:41 crc kubenswrapper[4632]: I1201 07:37:41.177809 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-7nr8t_9ef9114d-cef9-41d1-ae4f-be2037eebd1e/frr/0.log" Dec 01 07:37:51 crc kubenswrapper[4632]: I1201 07:37:51.067049 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx_a2c9f938-68bc-4efd-8547-45136745c6c6/util/0.log" Dec 01 07:37:51 crc kubenswrapper[4632]: I1201 07:37:51.267133 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx_a2c9f938-68bc-4efd-8547-45136745c6c6/util/0.log" Dec 01 07:37:51 crc kubenswrapper[4632]: I1201 07:37:51.281198 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx_a2c9f938-68bc-4efd-8547-45136745c6c6/pull/0.log" Dec 01 07:37:51 crc kubenswrapper[4632]: I1201 07:37:51.287044 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx_a2c9f938-68bc-4efd-8547-45136745c6c6/pull/0.log" Dec 01 07:37:51 crc kubenswrapper[4632]: I1201 07:37:51.469989 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx_a2c9f938-68bc-4efd-8547-45136745c6c6/extract/0.log" Dec 01 07:37:51 crc kubenswrapper[4632]: I1201 07:37:51.472397 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx_a2c9f938-68bc-4efd-8547-45136745c6c6/pull/0.log" Dec 01 07:37:51 crc kubenswrapper[4632]: I1201 07:37:51.490995 4632 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fhxmxx_a2c9f938-68bc-4efd-8547-45136745c6c6/util/0.log" Dec 01 07:37:51 crc kubenswrapper[4632]: I1201 07:37:51.643174 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt_f5f12f82-cb0c-4db9-bb02-4da44d980004/util/0.log" Dec 01 07:37:51 crc kubenswrapper[4632]: I1201 07:37:51.753507 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt_f5f12f82-cb0c-4db9-bb02-4da44d980004/util/0.log" Dec 01 07:37:51 crc kubenswrapper[4632]: I1201 07:37:51.753750 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt_f5f12f82-cb0c-4db9-bb02-4da44d980004/pull/0.log" Dec 01 07:37:51 crc kubenswrapper[4632]: I1201 07:37:51.781609 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt_f5f12f82-cb0c-4db9-bb02-4da44d980004/pull/0.log" Dec 01 07:37:51 crc kubenswrapper[4632]: I1201 07:37:51.927640 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt_f5f12f82-cb0c-4db9-bb02-4da44d980004/util/0.log" Dec 01 07:37:51 crc kubenswrapper[4632]: I1201 07:37:51.933563 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt_f5f12f82-cb0c-4db9-bb02-4da44d980004/pull/0.log" Dec 01 07:37:51 crc kubenswrapper[4632]: I1201 07:37:51.942291 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83g74qt_f5f12f82-cb0c-4db9-bb02-4da44d980004/extract/0.log" Dec 01 07:37:52 crc kubenswrapper[4632]: I1201 07:37:52.079504 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l4w7t_de997f32-b849-4dcd-96e7-de56cfa3ec3d/extract-utilities/0.log" Dec 01 07:37:52 crc kubenswrapper[4632]: I1201 07:37:52.215890 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l4w7t_de997f32-b849-4dcd-96e7-de56cfa3ec3d/extract-utilities/0.log" Dec 01 07:37:52 crc kubenswrapper[4632]: I1201 07:37:52.231664 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l4w7t_de997f32-b849-4dcd-96e7-de56cfa3ec3d/extract-content/0.log" Dec 01 07:37:52 crc kubenswrapper[4632]: I1201 07:37:52.243832 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l4w7t_de997f32-b849-4dcd-96e7-de56cfa3ec3d/extract-content/0.log" Dec 01 07:37:52 crc kubenswrapper[4632]: I1201 07:37:52.358018 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l4w7t_de997f32-b849-4dcd-96e7-de56cfa3ec3d/extract-utilities/0.log" Dec 01 07:37:52 crc kubenswrapper[4632]: I1201 07:37:52.394337 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l4w7t_de997f32-b849-4dcd-96e7-de56cfa3ec3d/extract-content/0.log" Dec 01 07:37:52 crc kubenswrapper[4632]: I1201 07:37:52.540535 4632 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-bd2vg_07762bc1-f49e-4c35-8f4b-04da5c0c9adf/extract-utilities/0.log" Dec 01 07:37:52 crc kubenswrapper[4632]: I1201 07:37:52.741180 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-l4w7t_de997f32-b849-4dcd-96e7-de56cfa3ec3d/registry-server/0.log" Dec 01 07:37:52 crc kubenswrapper[4632]: I1201 07:37:52.772080 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-bd2vg_07762bc1-f49e-4c35-8f4b-04da5c0c9adf/extract-content/0.log" Dec 01 07:37:52 crc kubenswrapper[4632]: I1201 07:37:52.793464 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-bd2vg_07762bc1-f49e-4c35-8f4b-04da5c0c9adf/extract-utilities/0.log" Dec 01 07:37:52 crc kubenswrapper[4632]: I1201 07:37:52.814495 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-bd2vg_07762bc1-f49e-4c35-8f4b-04da5c0c9adf/extract-content/0.log" Dec 01 07:37:52 crc kubenswrapper[4632]: I1201 07:37:52.936922 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-bd2vg_07762bc1-f49e-4c35-8f4b-04da5c0c9adf/extract-utilities/0.log" Dec 01 07:37:52 crc kubenswrapper[4632]: I1201 07:37:52.944691 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-bd2vg_07762bc1-f49e-4c35-8f4b-04da5c0c9adf/extract-content/0.log" Dec 01 07:37:53 crc kubenswrapper[4632]: I1201 07:37:53.069959 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-bd2vg_07762bc1-f49e-4c35-8f4b-04da5c0c9adf/registry-server/0.log" Dec 01 07:37:53 crc kubenswrapper[4632]: I1201 07:37:53.109755 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-crm4g_5afb9da5-167e-47cf-80fe-e9365ec939fd/marketplace-operator/0.log" Dec 01 07:37:53 crc kubenswrapper[4632]: I1201 07:37:53.160948 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r52z6_1468035d-a3bd-4465-9fee-f27f6f7d4d7e/extract-utilities/0.log" Dec 01 07:37:53 crc kubenswrapper[4632]: I1201 07:37:53.349471 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r52z6_1468035d-a3bd-4465-9fee-f27f6f7d4d7e/extract-utilities/0.log" Dec 01 07:37:53 crc kubenswrapper[4632]: I1201 07:37:53.371098 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r52z6_1468035d-a3bd-4465-9fee-f27f6f7d4d7e/extract-content/0.log" Dec 01 07:37:53 crc kubenswrapper[4632]: I1201 07:37:53.371940 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r52z6_1468035d-a3bd-4465-9fee-f27f6f7d4d7e/extract-content/0.log" Dec 01 07:37:53 crc kubenswrapper[4632]: I1201 07:37:53.522073 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r52z6_1468035d-a3bd-4465-9fee-f27f6f7d4d7e/extract-utilities/0.log" Dec 01 07:37:53 crc kubenswrapper[4632]: I1201 07:37:53.530436 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r52z6_1468035d-a3bd-4465-9fee-f27f6f7d4d7e/extract-content/0.log" Dec 01 07:37:53 crc kubenswrapper[4632]: I1201 07:37:53.682492 4632 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-r52z6_1468035d-a3bd-4465-9fee-f27f6f7d4d7e/registry-server/0.log" Dec 01 07:37:53 crc kubenswrapper[4632]: I1201 07:37:53.718005 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-t667l_07f1738b-a57a-41f6-909f-0f830c165731/extract-utilities/0.log" Dec 01 07:37:53 crc kubenswrapper[4632]: I1201 07:37:53.836926 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-t667l_07f1738b-a57a-41f6-909f-0f830c165731/extract-utilities/0.log" Dec 01 07:37:53 crc kubenswrapper[4632]: I1201 07:37:53.854908 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-t667l_07f1738b-a57a-41f6-909f-0f830c165731/extract-content/0.log" Dec 01 07:37:53 crc kubenswrapper[4632]: I1201 07:37:53.864343 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-t667l_07f1738b-a57a-41f6-909f-0f830c165731/extract-content/0.log" Dec 01 07:37:54 crc kubenswrapper[4632]: I1201 07:37:54.008903 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-t667l_07f1738b-a57a-41f6-909f-0f830c165731/extract-content/0.log" Dec 01 07:37:54 crc kubenswrapper[4632]: I1201 07:37:54.009796 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-t667l_07f1738b-a57a-41f6-909f-0f830c165731/extract-utilities/0.log" Dec 01 07:37:54 crc kubenswrapper[4632]: I1201 07:37:54.379037 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-t667l_07f1738b-a57a-41f6-909f-0f830c165731/registry-server/0.log" Dec 01 07:38:49 crc kubenswrapper[4632]: I1201 07:38:49.498150 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:38:49 crc kubenswrapper[4632]: I1201 07:38:49.498771 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:39:16 crc kubenswrapper[4632]: I1201 07:39:16.412775 4632 generic.go:334] "Generic (PLEG): container finished" podID="6a303f25-5be2-46af-9812-5f9155a6e91c" containerID="85c0c0dac34724a01e10c4b076ced3732163e08969b1ccbaa233837c227aa2fb" exitCode=0 Dec 01 07:39:16 crc kubenswrapper[4632]: I1201 07:39:16.412870 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-t8g5z/must-gather-46pfq" event={"ID":"6a303f25-5be2-46af-9812-5f9155a6e91c","Type":"ContainerDied","Data":"85c0c0dac34724a01e10c4b076ced3732163e08969b1ccbaa233837c227aa2fb"} Dec 01 07:39:16 crc kubenswrapper[4632]: I1201 07:39:16.414241 4632 scope.go:117] "RemoveContainer" containerID="85c0c0dac34724a01e10c4b076ced3732163e08969b1ccbaa233837c227aa2fb" Dec 01 07:39:16 crc kubenswrapper[4632]: I1201 07:39:16.689254 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-t8g5z_must-gather-46pfq_6a303f25-5be2-46af-9812-5f9155a6e91c/gather/0.log" Dec 01 07:39:19 crc kubenswrapper[4632]: I1201 07:39:19.498368 4632 
patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:39:19 crc kubenswrapper[4632]: I1201 07:39:19.498754 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:39:25 crc kubenswrapper[4632]: I1201 07:39:25.411493 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-t8g5z/must-gather-46pfq"] Dec 01 07:39:25 crc kubenswrapper[4632]: I1201 07:39:25.412451 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-t8g5z/must-gather-46pfq" podUID="6a303f25-5be2-46af-9812-5f9155a6e91c" containerName="copy" containerID="cri-o://3fc0730268ce117279b13d7c185b5aa23ac258fe13d845bb276534ffc8354153" gracePeriod=2 Dec 01 07:39:25 crc kubenswrapper[4632]: I1201 07:39:25.418133 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-t8g5z/must-gather-46pfq"] Dec 01 07:39:26 crc kubenswrapper[4632]: I1201 07:39:25.768830 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-t8g5z_must-gather-46pfq_6a303f25-5be2-46af-9812-5f9155a6e91c/copy/0.log" Dec 01 07:39:26 crc kubenswrapper[4632]: I1201 07:39:25.770162 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-t8g5z/must-gather-46pfq" Dec 01 07:39:26 crc kubenswrapper[4632]: I1201 07:39:25.899748 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dhjw2\" (UniqueName: \"kubernetes.io/projected/6a303f25-5be2-46af-9812-5f9155a6e91c-kube-api-access-dhjw2\") pod \"6a303f25-5be2-46af-9812-5f9155a6e91c\" (UID: \"6a303f25-5be2-46af-9812-5f9155a6e91c\") " Dec 01 07:39:26 crc kubenswrapper[4632]: I1201 07:39:25.899852 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6a303f25-5be2-46af-9812-5f9155a6e91c-must-gather-output\") pod \"6a303f25-5be2-46af-9812-5f9155a6e91c\" (UID: \"6a303f25-5be2-46af-9812-5f9155a6e91c\") " Dec 01 07:39:26 crc kubenswrapper[4632]: I1201 07:39:25.909900 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a303f25-5be2-46af-9812-5f9155a6e91c-kube-api-access-dhjw2" (OuterVolumeSpecName: "kube-api-access-dhjw2") pod "6a303f25-5be2-46af-9812-5f9155a6e91c" (UID: "6a303f25-5be2-46af-9812-5f9155a6e91c"). InnerVolumeSpecName "kube-api-access-dhjw2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:39:26 crc kubenswrapper[4632]: I1201 07:39:26.002796 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dhjw2\" (UniqueName: \"kubernetes.io/projected/6a303f25-5be2-46af-9812-5f9155a6e91c-kube-api-access-dhjw2\") on node \"crc\" DevicePath \"\"" Dec 01 07:39:26 crc kubenswrapper[4632]: I1201 07:39:26.034620 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a303f25-5be2-46af-9812-5f9155a6e91c-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "6a303f25-5be2-46af-9812-5f9155a6e91c" (UID: "6a303f25-5be2-46af-9812-5f9155a6e91c"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:39:26 crc kubenswrapper[4632]: I1201 07:39:26.105426 4632 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6a303f25-5be2-46af-9812-5f9155a6e91c-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 01 07:39:26 crc kubenswrapper[4632]: I1201 07:39:26.514468 4632 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-t8g5z_must-gather-46pfq_6a303f25-5be2-46af-9812-5f9155a6e91c/copy/0.log" Dec 01 07:39:26 crc kubenswrapper[4632]: I1201 07:39:26.515141 4632 generic.go:334] "Generic (PLEG): container finished" podID="6a303f25-5be2-46af-9812-5f9155a6e91c" containerID="3fc0730268ce117279b13d7c185b5aa23ac258fe13d845bb276534ffc8354153" exitCode=143 Dec 01 07:39:26 crc kubenswrapper[4632]: I1201 07:39:26.515204 4632 scope.go:117] "RemoveContainer" containerID="3fc0730268ce117279b13d7c185b5aa23ac258fe13d845bb276534ffc8354153" Dec 01 07:39:26 crc kubenswrapper[4632]: I1201 07:39:26.515328 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-t8g5z/must-gather-46pfq" Dec 01 07:39:26 crc kubenswrapper[4632]: I1201 07:39:26.532761 4632 scope.go:117] "RemoveContainer" containerID="85c0c0dac34724a01e10c4b076ced3732163e08969b1ccbaa233837c227aa2fb" Dec 01 07:39:26 crc kubenswrapper[4632]: I1201 07:39:26.597505 4632 scope.go:117] "RemoveContainer" containerID="3fc0730268ce117279b13d7c185b5aa23ac258fe13d845bb276534ffc8354153" Dec 01 07:39:26 crc kubenswrapper[4632]: E1201 07:39:26.597969 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3fc0730268ce117279b13d7c185b5aa23ac258fe13d845bb276534ffc8354153\": container with ID starting with 3fc0730268ce117279b13d7c185b5aa23ac258fe13d845bb276534ffc8354153 not found: ID does not exist" containerID="3fc0730268ce117279b13d7c185b5aa23ac258fe13d845bb276534ffc8354153" Dec 01 07:39:26 crc kubenswrapper[4632]: I1201 07:39:26.598049 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fc0730268ce117279b13d7c185b5aa23ac258fe13d845bb276534ffc8354153"} err="failed to get container status \"3fc0730268ce117279b13d7c185b5aa23ac258fe13d845bb276534ffc8354153\": rpc error: code = NotFound desc = could not find container \"3fc0730268ce117279b13d7c185b5aa23ac258fe13d845bb276534ffc8354153\": container with ID starting with 3fc0730268ce117279b13d7c185b5aa23ac258fe13d845bb276534ffc8354153 not found: ID does not exist" Dec 01 07:39:26 crc kubenswrapper[4632]: I1201 07:39:26.598083 4632 scope.go:117] "RemoveContainer" containerID="85c0c0dac34724a01e10c4b076ced3732163e08969b1ccbaa233837c227aa2fb" Dec 01 07:39:26 crc kubenswrapper[4632]: E1201 07:39:26.598518 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85c0c0dac34724a01e10c4b076ced3732163e08969b1ccbaa233837c227aa2fb\": container with ID starting with 85c0c0dac34724a01e10c4b076ced3732163e08969b1ccbaa233837c227aa2fb not found: ID does not exist" containerID="85c0c0dac34724a01e10c4b076ced3732163e08969b1ccbaa233837c227aa2fb" Dec 01 07:39:26 crc kubenswrapper[4632]: I1201 07:39:26.598544 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85c0c0dac34724a01e10c4b076ced3732163e08969b1ccbaa233837c227aa2fb"} err="failed to get container status \"85c0c0dac34724a01e10c4b076ced3732163e08969b1ccbaa233837c227aa2fb\": rpc error: code = NotFound desc = could not find container \"85c0c0dac34724a01e10c4b076ced3732163e08969b1ccbaa233837c227aa2fb\": container with ID starting with 85c0c0dac34724a01e10c4b076ced3732163e08969b1ccbaa233837c227aa2fb not found: ID does not exist" Dec 01 07:39:26 crc kubenswrapper[4632]: I1201 07:39:26.760975 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a303f25-5be2-46af-9812-5f9155a6e91c" path="/var/lib/kubelet/pods/6a303f25-5be2-46af-9812-5f9155a6e91c/volumes" Dec 01 07:39:49 crc kubenswrapper[4632]: I1201 07:39:49.497738 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:39:49 crc kubenswrapper[4632]: I1201 07:39:49.499258 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" 
podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:39:49 crc kubenswrapper[4632]: I1201 07:39:49.499319 4632 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" Dec 01 07:39:49 crc kubenswrapper[4632]: I1201 07:39:49.500269 4632 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6bd77611f8ce655c502bf7a7d55064856d8c28e6ff4f924c8c321dab8d172bd2"} pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 07:39:49 crc kubenswrapper[4632]: I1201 07:39:49.500330 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" containerID="cri-o://6bd77611f8ce655c502bf7a7d55064856d8c28e6ff4f924c8c321dab8d172bd2" gracePeriod=600 Dec 01 07:39:49 crc kubenswrapper[4632]: I1201 07:39:49.713535 4632 generic.go:334] "Generic (PLEG): container finished" podID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerID="6bd77611f8ce655c502bf7a7d55064856d8c28e6ff4f924c8c321dab8d172bd2" exitCode=0 Dec 01 07:39:49 crc kubenswrapper[4632]: I1201 07:39:49.713580 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerDied","Data":"6bd77611f8ce655c502bf7a7d55064856d8c28e6ff4f924c8c321dab8d172bd2"} Dec 01 07:39:49 crc kubenswrapper[4632]: I1201 07:39:49.713841 4632 scope.go:117] "RemoveContainer" containerID="99f852c9570d4ab640534e7f1f0ecae39160bd834b7ebc818234d4c8b8126689" Dec 01 07:39:50 crc kubenswrapper[4632]: I1201 07:39:50.723274 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerStarted","Data":"2dcdc54947fded2608d810998eb7cdbcae0ece12a5aefa5f444e9f2d4f62b44c"} Dec 01 07:40:38 crc kubenswrapper[4632]: I1201 07:40:38.611781 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6jh6t"] Dec 01 07:40:38 crc kubenswrapper[4632]: E1201 07:40:38.613196 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77414e08-7005-4510-8c7b-e17c36f81cb7" containerName="extract-content" Dec 01 07:40:38 crc kubenswrapper[4632]: I1201 07:40:38.613235 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="77414e08-7005-4510-8c7b-e17c36f81cb7" containerName="extract-content" Dec 01 07:40:38 crc kubenswrapper[4632]: E1201 07:40:38.613260 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a303f25-5be2-46af-9812-5f9155a6e91c" containerName="gather" Dec 01 07:40:38 crc kubenswrapper[4632]: I1201 07:40:38.613267 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a303f25-5be2-46af-9812-5f9155a6e91c" containerName="gather" Dec 01 07:40:38 crc kubenswrapper[4632]: E1201 07:40:38.613292 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77414e08-7005-4510-8c7b-e17c36f81cb7" containerName="registry-server" Dec 01 07:40:38 crc kubenswrapper[4632]: I1201 
07:40:38.613308 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="77414e08-7005-4510-8c7b-e17c36f81cb7" containerName="registry-server" Dec 01 07:40:38 crc kubenswrapper[4632]: E1201 07:40:38.613326 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77414e08-7005-4510-8c7b-e17c36f81cb7" containerName="extract-utilities" Dec 01 07:40:38 crc kubenswrapper[4632]: I1201 07:40:38.613332 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="77414e08-7005-4510-8c7b-e17c36f81cb7" containerName="extract-utilities" Dec 01 07:40:38 crc kubenswrapper[4632]: E1201 07:40:38.613370 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a303f25-5be2-46af-9812-5f9155a6e91c" containerName="copy" Dec 01 07:40:38 crc kubenswrapper[4632]: I1201 07:40:38.613376 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a303f25-5be2-46af-9812-5f9155a6e91c" containerName="copy" Dec 01 07:40:38 crc kubenswrapper[4632]: I1201 07:40:38.613709 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="77414e08-7005-4510-8c7b-e17c36f81cb7" containerName="registry-server" Dec 01 07:40:38 crc kubenswrapper[4632]: I1201 07:40:38.613719 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a303f25-5be2-46af-9812-5f9155a6e91c" containerName="copy" Dec 01 07:40:38 crc kubenswrapper[4632]: I1201 07:40:38.613735 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a303f25-5be2-46af-9812-5f9155a6e91c" containerName="gather" Dec 01 07:40:38 crc kubenswrapper[4632]: I1201 07:40:38.615291 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6jh6t" Dec 01 07:40:38 crc kubenswrapper[4632]: I1201 07:40:38.621293 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6jh6t"] Dec 01 07:40:38 crc kubenswrapper[4632]: I1201 07:40:38.626765 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f97f156b-141b-4367-932a-b14e09cb0c7f-utilities\") pod \"redhat-operators-6jh6t\" (UID: \"f97f156b-141b-4367-932a-b14e09cb0c7f\") " pod="openshift-marketplace/redhat-operators-6jh6t" Dec 01 07:40:38 crc kubenswrapper[4632]: I1201 07:40:38.626818 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nkxcg\" (UniqueName: \"kubernetes.io/projected/f97f156b-141b-4367-932a-b14e09cb0c7f-kube-api-access-nkxcg\") pod \"redhat-operators-6jh6t\" (UID: \"f97f156b-141b-4367-932a-b14e09cb0c7f\") " pod="openshift-marketplace/redhat-operators-6jh6t" Dec 01 07:40:38 crc kubenswrapper[4632]: I1201 07:40:38.626914 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f97f156b-141b-4367-932a-b14e09cb0c7f-catalog-content\") pod \"redhat-operators-6jh6t\" (UID: \"f97f156b-141b-4367-932a-b14e09cb0c7f\") " pod="openshift-marketplace/redhat-operators-6jh6t" Dec 01 07:40:38 crc kubenswrapper[4632]: I1201 07:40:38.729830 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f97f156b-141b-4367-932a-b14e09cb0c7f-catalog-content\") pod \"redhat-operators-6jh6t\" (UID: \"f97f156b-141b-4367-932a-b14e09cb0c7f\") " pod="openshift-marketplace/redhat-operators-6jh6t" Dec 01 07:40:38 crc kubenswrapper[4632]: I1201 07:40:38.729935 
4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f97f156b-141b-4367-932a-b14e09cb0c7f-utilities\") pod \"redhat-operators-6jh6t\" (UID: \"f97f156b-141b-4367-932a-b14e09cb0c7f\") " pod="openshift-marketplace/redhat-operators-6jh6t" Dec 01 07:40:38 crc kubenswrapper[4632]: I1201 07:40:38.729978 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nkxcg\" (UniqueName: \"kubernetes.io/projected/f97f156b-141b-4367-932a-b14e09cb0c7f-kube-api-access-nkxcg\") pod \"redhat-operators-6jh6t\" (UID: \"f97f156b-141b-4367-932a-b14e09cb0c7f\") " pod="openshift-marketplace/redhat-operators-6jh6t" Dec 01 07:40:38 crc kubenswrapper[4632]: I1201 07:40:38.730455 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f97f156b-141b-4367-932a-b14e09cb0c7f-utilities\") pod \"redhat-operators-6jh6t\" (UID: \"f97f156b-141b-4367-932a-b14e09cb0c7f\") " pod="openshift-marketplace/redhat-operators-6jh6t" Dec 01 07:40:38 crc kubenswrapper[4632]: I1201 07:40:38.730585 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f97f156b-141b-4367-932a-b14e09cb0c7f-catalog-content\") pod \"redhat-operators-6jh6t\" (UID: \"f97f156b-141b-4367-932a-b14e09cb0c7f\") " pod="openshift-marketplace/redhat-operators-6jh6t" Dec 01 07:40:38 crc kubenswrapper[4632]: I1201 07:40:38.748996 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nkxcg\" (UniqueName: \"kubernetes.io/projected/f97f156b-141b-4367-932a-b14e09cb0c7f-kube-api-access-nkxcg\") pod \"redhat-operators-6jh6t\" (UID: \"f97f156b-141b-4367-932a-b14e09cb0c7f\") " pod="openshift-marketplace/redhat-operators-6jh6t" Dec 01 07:40:38 crc kubenswrapper[4632]: I1201 07:40:38.939438 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6jh6t" Dec 01 07:40:39 crc kubenswrapper[4632]: I1201 07:40:39.386464 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6jh6t"] Dec 01 07:40:40 crc kubenswrapper[4632]: I1201 07:40:40.151860 4632 generic.go:334] "Generic (PLEG): container finished" podID="f97f156b-141b-4367-932a-b14e09cb0c7f" containerID="d73d7613d66bd5a4620f7d7792736bea1c94bf18a9068c5afcf0819a270e0354" exitCode=0 Dec 01 07:40:40 crc kubenswrapper[4632]: I1201 07:40:40.152051 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6jh6t" event={"ID":"f97f156b-141b-4367-932a-b14e09cb0c7f","Type":"ContainerDied","Data":"d73d7613d66bd5a4620f7d7792736bea1c94bf18a9068c5afcf0819a270e0354"} Dec 01 07:40:40 crc kubenswrapper[4632]: I1201 07:40:40.152216 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6jh6t" event={"ID":"f97f156b-141b-4367-932a-b14e09cb0c7f","Type":"ContainerStarted","Data":"69454c48ddd0283ff099e9afa45ca64ac6fe2143f486921be2a9e624ae7fc91f"} Dec 01 07:40:41 crc kubenswrapper[4632]: I1201 07:40:41.163336 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6jh6t" event={"ID":"f97f156b-141b-4367-932a-b14e09cb0c7f","Type":"ContainerStarted","Data":"7e7214a0818053e7e307b1e2f2c1a48292a8ecef3ce776288891075ca8911dd5"} Dec 01 07:40:42 crc kubenswrapper[4632]: I1201 07:40:42.172677 4632 generic.go:334] "Generic (PLEG): container finished" podID="f97f156b-141b-4367-932a-b14e09cb0c7f" containerID="7e7214a0818053e7e307b1e2f2c1a48292a8ecef3ce776288891075ca8911dd5" exitCode=0 Dec 01 07:40:42 crc kubenswrapper[4632]: I1201 07:40:42.172777 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6jh6t" event={"ID":"f97f156b-141b-4367-932a-b14e09cb0c7f","Type":"ContainerDied","Data":"7e7214a0818053e7e307b1e2f2c1a48292a8ecef3ce776288891075ca8911dd5"} Dec 01 07:40:43 crc kubenswrapper[4632]: I1201 07:40:43.184830 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6jh6t" event={"ID":"f97f156b-141b-4367-932a-b14e09cb0c7f","Type":"ContainerStarted","Data":"014704f5011ee572c82fed9c50be8bbb07f4758930429d19e3f41bec9d2e2817"} Dec 01 07:40:43 crc kubenswrapper[4632]: I1201 07:40:43.207424 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6jh6t" podStartSLOduration=2.542098678 podStartE2EDuration="5.207400638s" podCreationTimestamp="2025-12-01 07:40:38 +0000 UTC" firstStartedPulling="2025-12-01 07:40:40.154001972 +0000 UTC m=+3449.719014945" lastFinishedPulling="2025-12-01 07:40:42.819303932 +0000 UTC m=+3452.384316905" observedRunningTime="2025-12-01 07:40:43.202512607 +0000 UTC m=+3452.767525581" watchObservedRunningTime="2025-12-01 07:40:43.207400638 +0000 UTC m=+3452.772413611" Dec 01 07:40:48 crc kubenswrapper[4632]: I1201 07:40:48.940372 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6jh6t" Dec 01 07:40:48 crc kubenswrapper[4632]: I1201 07:40:48.941276 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6jh6t" Dec 01 07:40:48 crc kubenswrapper[4632]: I1201 07:40:48.977492 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6jh6t" Dec 
01 07:40:49 crc kubenswrapper[4632]: I1201 07:40:49.270638 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6jh6t" Dec 01 07:40:49 crc kubenswrapper[4632]: I1201 07:40:49.311739 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6jh6t"] Dec 01 07:40:51 crc kubenswrapper[4632]: I1201 07:40:51.250661 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6jh6t" podUID="f97f156b-141b-4367-932a-b14e09cb0c7f" containerName="registry-server" containerID="cri-o://014704f5011ee572c82fed9c50be8bbb07f4758930429d19e3f41bec9d2e2817" gracePeriod=2 Dec 01 07:40:51 crc kubenswrapper[4632]: I1201 07:40:51.628694 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6jh6t" Dec 01 07:40:51 crc kubenswrapper[4632]: I1201 07:40:51.781496 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nkxcg\" (UniqueName: \"kubernetes.io/projected/f97f156b-141b-4367-932a-b14e09cb0c7f-kube-api-access-nkxcg\") pod \"f97f156b-141b-4367-932a-b14e09cb0c7f\" (UID: \"f97f156b-141b-4367-932a-b14e09cb0c7f\") " Dec 01 07:40:51 crc kubenswrapper[4632]: I1201 07:40:51.781797 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f97f156b-141b-4367-932a-b14e09cb0c7f-utilities\") pod \"f97f156b-141b-4367-932a-b14e09cb0c7f\" (UID: \"f97f156b-141b-4367-932a-b14e09cb0c7f\") " Dec 01 07:40:51 crc kubenswrapper[4632]: I1201 07:40:51.781960 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f97f156b-141b-4367-932a-b14e09cb0c7f-catalog-content\") pod \"f97f156b-141b-4367-932a-b14e09cb0c7f\" (UID: \"f97f156b-141b-4367-932a-b14e09cb0c7f\") " Dec 01 07:40:51 crc kubenswrapper[4632]: I1201 07:40:51.782654 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f97f156b-141b-4367-932a-b14e09cb0c7f-utilities" (OuterVolumeSpecName: "utilities") pod "f97f156b-141b-4367-932a-b14e09cb0c7f" (UID: "f97f156b-141b-4367-932a-b14e09cb0c7f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:40:51 crc kubenswrapper[4632]: I1201 07:40:51.787121 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f97f156b-141b-4367-932a-b14e09cb0c7f-kube-api-access-nkxcg" (OuterVolumeSpecName: "kube-api-access-nkxcg") pod "f97f156b-141b-4367-932a-b14e09cb0c7f" (UID: "f97f156b-141b-4367-932a-b14e09cb0c7f"). InnerVolumeSpecName "kube-api-access-nkxcg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:40:51 crc kubenswrapper[4632]: I1201 07:40:51.883992 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nkxcg\" (UniqueName: \"kubernetes.io/projected/f97f156b-141b-4367-932a-b14e09cb0c7f-kube-api-access-nkxcg\") on node \"crc\" DevicePath \"\"" Dec 01 07:40:51 crc kubenswrapper[4632]: I1201 07:40:51.884018 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f97f156b-141b-4367-932a-b14e09cb0c7f-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:40:52 crc kubenswrapper[4632]: I1201 07:40:52.259133 4632 generic.go:334] "Generic (PLEG): container finished" podID="f97f156b-141b-4367-932a-b14e09cb0c7f" containerID="014704f5011ee572c82fed9c50be8bbb07f4758930429d19e3f41bec9d2e2817" exitCode=0 Dec 01 07:40:52 crc kubenswrapper[4632]: I1201 07:40:52.259183 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6jh6t" event={"ID":"f97f156b-141b-4367-932a-b14e09cb0c7f","Type":"ContainerDied","Data":"014704f5011ee572c82fed9c50be8bbb07f4758930429d19e3f41bec9d2e2817"} Dec 01 07:40:52 crc kubenswrapper[4632]: I1201 07:40:52.259194 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6jh6t" Dec 01 07:40:52 crc kubenswrapper[4632]: I1201 07:40:52.259224 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6jh6t" event={"ID":"f97f156b-141b-4367-932a-b14e09cb0c7f","Type":"ContainerDied","Data":"69454c48ddd0283ff099e9afa45ca64ac6fe2143f486921be2a9e624ae7fc91f"} Dec 01 07:40:52 crc kubenswrapper[4632]: I1201 07:40:52.259246 4632 scope.go:117] "RemoveContainer" containerID="014704f5011ee572c82fed9c50be8bbb07f4758930429d19e3f41bec9d2e2817" Dec 01 07:40:52 crc kubenswrapper[4632]: I1201 07:40:52.276825 4632 scope.go:117] "RemoveContainer" containerID="7e7214a0818053e7e307b1e2f2c1a48292a8ecef3ce776288891075ca8911dd5" Dec 01 07:40:52 crc kubenswrapper[4632]: I1201 07:40:52.291894 4632 scope.go:117] "RemoveContainer" containerID="d73d7613d66bd5a4620f7d7792736bea1c94bf18a9068c5afcf0819a270e0354" Dec 01 07:40:52 crc kubenswrapper[4632]: I1201 07:40:52.328834 4632 scope.go:117] "RemoveContainer" containerID="014704f5011ee572c82fed9c50be8bbb07f4758930429d19e3f41bec9d2e2817" Dec 01 07:40:52 crc kubenswrapper[4632]: E1201 07:40:52.329222 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"014704f5011ee572c82fed9c50be8bbb07f4758930429d19e3f41bec9d2e2817\": container with ID starting with 014704f5011ee572c82fed9c50be8bbb07f4758930429d19e3f41bec9d2e2817 not found: ID does not exist" containerID="014704f5011ee572c82fed9c50be8bbb07f4758930429d19e3f41bec9d2e2817" Dec 01 07:40:52 crc kubenswrapper[4632]: I1201 07:40:52.329254 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"014704f5011ee572c82fed9c50be8bbb07f4758930429d19e3f41bec9d2e2817"} err="failed to get container status \"014704f5011ee572c82fed9c50be8bbb07f4758930429d19e3f41bec9d2e2817\": rpc error: code = NotFound desc = could not find container \"014704f5011ee572c82fed9c50be8bbb07f4758930429d19e3f41bec9d2e2817\": container with ID starting with 014704f5011ee572c82fed9c50be8bbb07f4758930429d19e3f41bec9d2e2817 not found: ID does not exist" Dec 01 07:40:52 crc kubenswrapper[4632]: I1201 07:40:52.329278 4632 scope.go:117] 
"RemoveContainer" containerID="7e7214a0818053e7e307b1e2f2c1a48292a8ecef3ce776288891075ca8911dd5" Dec 01 07:40:52 crc kubenswrapper[4632]: E1201 07:40:52.329553 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e7214a0818053e7e307b1e2f2c1a48292a8ecef3ce776288891075ca8911dd5\": container with ID starting with 7e7214a0818053e7e307b1e2f2c1a48292a8ecef3ce776288891075ca8911dd5 not found: ID does not exist" containerID="7e7214a0818053e7e307b1e2f2c1a48292a8ecef3ce776288891075ca8911dd5" Dec 01 07:40:52 crc kubenswrapper[4632]: I1201 07:40:52.329574 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e7214a0818053e7e307b1e2f2c1a48292a8ecef3ce776288891075ca8911dd5"} err="failed to get container status \"7e7214a0818053e7e307b1e2f2c1a48292a8ecef3ce776288891075ca8911dd5\": rpc error: code = NotFound desc = could not find container \"7e7214a0818053e7e307b1e2f2c1a48292a8ecef3ce776288891075ca8911dd5\": container with ID starting with 7e7214a0818053e7e307b1e2f2c1a48292a8ecef3ce776288891075ca8911dd5 not found: ID does not exist" Dec 01 07:40:52 crc kubenswrapper[4632]: I1201 07:40:52.329587 4632 scope.go:117] "RemoveContainer" containerID="d73d7613d66bd5a4620f7d7792736bea1c94bf18a9068c5afcf0819a270e0354" Dec 01 07:40:52 crc kubenswrapper[4632]: E1201 07:40:52.329840 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d73d7613d66bd5a4620f7d7792736bea1c94bf18a9068c5afcf0819a270e0354\": container with ID starting with d73d7613d66bd5a4620f7d7792736bea1c94bf18a9068c5afcf0819a270e0354 not found: ID does not exist" containerID="d73d7613d66bd5a4620f7d7792736bea1c94bf18a9068c5afcf0819a270e0354" Dec 01 07:40:52 crc kubenswrapper[4632]: I1201 07:40:52.329857 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d73d7613d66bd5a4620f7d7792736bea1c94bf18a9068c5afcf0819a270e0354"} err="failed to get container status \"d73d7613d66bd5a4620f7d7792736bea1c94bf18a9068c5afcf0819a270e0354\": rpc error: code = NotFound desc = could not find container \"d73d7613d66bd5a4620f7d7792736bea1c94bf18a9068c5afcf0819a270e0354\": container with ID starting with d73d7613d66bd5a4620f7d7792736bea1c94bf18a9068c5afcf0819a270e0354 not found: ID does not exist" Dec 01 07:40:52 crc kubenswrapper[4632]: I1201 07:40:52.741596 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f97f156b-141b-4367-932a-b14e09cb0c7f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f97f156b-141b-4367-932a-b14e09cb0c7f" (UID: "f97f156b-141b-4367-932a-b14e09cb0c7f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:40:52 crc kubenswrapper[4632]: I1201 07:40:52.799505 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f97f156b-141b-4367-932a-b14e09cb0c7f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:40:52 crc kubenswrapper[4632]: I1201 07:40:52.878182 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6jh6t"] Dec 01 07:40:52 crc kubenswrapper[4632]: I1201 07:40:52.893301 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6jh6t"] Dec 01 07:40:54 crc kubenswrapper[4632]: I1201 07:40:54.758880 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f97f156b-141b-4367-932a-b14e09cb0c7f" path="/var/lib/kubelet/pods/f97f156b-141b-4367-932a-b14e09cb0c7f/volumes" Dec 01 07:41:49 crc kubenswrapper[4632]: I1201 07:41:49.498377 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:41:49 crc kubenswrapper[4632]: I1201 07:41:49.498764 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:41:51 crc kubenswrapper[4632]: I1201 07:41:51.333538 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-q2twj"] Dec 01 07:41:51 crc kubenswrapper[4632]: E1201 07:41:51.334104 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f97f156b-141b-4367-932a-b14e09cb0c7f" containerName="extract-utilities" Dec 01 07:41:51 crc kubenswrapper[4632]: I1201 07:41:51.334123 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="f97f156b-141b-4367-932a-b14e09cb0c7f" containerName="extract-utilities" Dec 01 07:41:51 crc kubenswrapper[4632]: E1201 07:41:51.334157 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f97f156b-141b-4367-932a-b14e09cb0c7f" containerName="registry-server" Dec 01 07:41:51 crc kubenswrapper[4632]: I1201 07:41:51.334163 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="f97f156b-141b-4367-932a-b14e09cb0c7f" containerName="registry-server" Dec 01 07:41:51 crc kubenswrapper[4632]: E1201 07:41:51.334173 4632 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f97f156b-141b-4367-932a-b14e09cb0c7f" containerName="extract-content" Dec 01 07:41:51 crc kubenswrapper[4632]: I1201 07:41:51.334180 4632 state_mem.go:107] "Deleted CPUSet assignment" podUID="f97f156b-141b-4367-932a-b14e09cb0c7f" containerName="extract-content" Dec 01 07:41:51 crc kubenswrapper[4632]: I1201 07:41:51.334390 4632 memory_manager.go:354] "RemoveStaleState removing state" podUID="f97f156b-141b-4367-932a-b14e09cb0c7f" containerName="registry-server" Dec 01 07:41:51 crc kubenswrapper[4632]: I1201 07:41:51.335699 4632 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q2twj" Dec 01 07:41:51 crc kubenswrapper[4632]: I1201 07:41:51.349137 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-q2twj"] Dec 01 07:41:51 crc kubenswrapper[4632]: I1201 07:41:51.406915 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2z5n\" (UniqueName: \"kubernetes.io/projected/37864ce8-b286-4d3b-b21e-e2980fe0316f-kube-api-access-d2z5n\") pod \"redhat-marketplace-q2twj\" (UID: \"37864ce8-b286-4d3b-b21e-e2980fe0316f\") " pod="openshift-marketplace/redhat-marketplace-q2twj" Dec 01 07:41:51 crc kubenswrapper[4632]: I1201 07:41:51.407199 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37864ce8-b286-4d3b-b21e-e2980fe0316f-catalog-content\") pod \"redhat-marketplace-q2twj\" (UID: \"37864ce8-b286-4d3b-b21e-e2980fe0316f\") " pod="openshift-marketplace/redhat-marketplace-q2twj" Dec 01 07:41:51 crc kubenswrapper[4632]: I1201 07:41:51.407230 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37864ce8-b286-4d3b-b21e-e2980fe0316f-utilities\") pod \"redhat-marketplace-q2twj\" (UID: \"37864ce8-b286-4d3b-b21e-e2980fe0316f\") " pod="openshift-marketplace/redhat-marketplace-q2twj" Dec 01 07:41:51 crc kubenswrapper[4632]: I1201 07:41:51.508714 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2z5n\" (UniqueName: \"kubernetes.io/projected/37864ce8-b286-4d3b-b21e-e2980fe0316f-kube-api-access-d2z5n\") pod \"redhat-marketplace-q2twj\" (UID: \"37864ce8-b286-4d3b-b21e-e2980fe0316f\") " pod="openshift-marketplace/redhat-marketplace-q2twj" Dec 01 07:41:51 crc kubenswrapper[4632]: I1201 07:41:51.508914 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37864ce8-b286-4d3b-b21e-e2980fe0316f-catalog-content\") pod \"redhat-marketplace-q2twj\" (UID: \"37864ce8-b286-4d3b-b21e-e2980fe0316f\") " pod="openshift-marketplace/redhat-marketplace-q2twj" Dec 01 07:41:51 crc kubenswrapper[4632]: I1201 07:41:51.508937 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37864ce8-b286-4d3b-b21e-e2980fe0316f-utilities\") pod \"redhat-marketplace-q2twj\" (UID: \"37864ce8-b286-4d3b-b21e-e2980fe0316f\") " pod="openshift-marketplace/redhat-marketplace-q2twj" Dec 01 07:41:51 crc kubenswrapper[4632]: I1201 07:41:51.509343 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37864ce8-b286-4d3b-b21e-e2980fe0316f-utilities\") pod \"redhat-marketplace-q2twj\" (UID: \"37864ce8-b286-4d3b-b21e-e2980fe0316f\") " pod="openshift-marketplace/redhat-marketplace-q2twj" Dec 01 07:41:51 crc kubenswrapper[4632]: I1201 07:41:51.509457 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37864ce8-b286-4d3b-b21e-e2980fe0316f-catalog-content\") pod \"redhat-marketplace-q2twj\" (UID: \"37864ce8-b286-4d3b-b21e-e2980fe0316f\") " pod="openshift-marketplace/redhat-marketplace-q2twj" Dec 01 07:41:51 crc kubenswrapper[4632]: I1201 07:41:51.524544 4632 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-d2z5n\" (UniqueName: \"kubernetes.io/projected/37864ce8-b286-4d3b-b21e-e2980fe0316f-kube-api-access-d2z5n\") pod \"redhat-marketplace-q2twj\" (UID: \"37864ce8-b286-4d3b-b21e-e2980fe0316f\") " pod="openshift-marketplace/redhat-marketplace-q2twj" Dec 01 07:41:51 crc kubenswrapper[4632]: I1201 07:41:51.658199 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q2twj" Dec 01 07:41:52 crc kubenswrapper[4632]: I1201 07:41:52.089514 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-q2twj"] Dec 01 07:41:52 crc kubenswrapper[4632]: I1201 07:41:52.729762 4632 generic.go:334] "Generic (PLEG): container finished" podID="37864ce8-b286-4d3b-b21e-e2980fe0316f" containerID="0cda6e85f36adbf0dc0d8919cb319370f27bd783e7d1efd0a5a2c62ff2cd024a" exitCode=0 Dec 01 07:41:52 crc kubenswrapper[4632]: I1201 07:41:52.729815 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q2twj" event={"ID":"37864ce8-b286-4d3b-b21e-e2980fe0316f","Type":"ContainerDied","Data":"0cda6e85f36adbf0dc0d8919cb319370f27bd783e7d1efd0a5a2c62ff2cd024a"} Dec 01 07:41:52 crc kubenswrapper[4632]: I1201 07:41:52.729868 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q2twj" event={"ID":"37864ce8-b286-4d3b-b21e-e2980fe0316f","Type":"ContainerStarted","Data":"805770f0d1dd71b100fb88c78fafa9f36bf290f893556fbe3d0c7c964a2f94cc"} Dec 01 07:41:54 crc kubenswrapper[4632]: I1201 07:41:54.745511 4632 generic.go:334] "Generic (PLEG): container finished" podID="37864ce8-b286-4d3b-b21e-e2980fe0316f" containerID="3c6d989e37264c9fc1c23039bd1e5beb496c941784489d34d60dfc8ce769be42" exitCode=0 Dec 01 07:41:54 crc kubenswrapper[4632]: I1201 07:41:54.745614 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q2twj" event={"ID":"37864ce8-b286-4d3b-b21e-e2980fe0316f","Type":"ContainerDied","Data":"3c6d989e37264c9fc1c23039bd1e5beb496c941784489d34d60dfc8ce769be42"} Dec 01 07:41:55 crc kubenswrapper[4632]: I1201 07:41:55.756046 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q2twj" event={"ID":"37864ce8-b286-4d3b-b21e-e2980fe0316f","Type":"ContainerStarted","Data":"bf254da90c303e3628a56f7bbb85de3810068b65b62112396268dab91a15f3f5"} Dec 01 07:41:55 crc kubenswrapper[4632]: I1201 07:41:55.776851 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-q2twj" podStartSLOduration=2.200428816 podStartE2EDuration="4.776833299s" podCreationTimestamp="2025-12-01 07:41:51 +0000 UTC" firstStartedPulling="2025-12-01 07:41:52.733259407 +0000 UTC m=+3522.298272380" lastFinishedPulling="2025-12-01 07:41:55.30966389 +0000 UTC m=+3524.874676863" observedRunningTime="2025-12-01 07:41:55.769069431 +0000 UTC m=+3525.334082404" watchObservedRunningTime="2025-12-01 07:41:55.776833299 +0000 UTC m=+3525.341846273" Dec 01 07:42:01 crc kubenswrapper[4632]: I1201 07:42:01.658398 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-q2twj" Dec 01 07:42:01 crc kubenswrapper[4632]: I1201 07:42:01.659119 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-q2twj" Dec 01 07:42:01 crc kubenswrapper[4632]: I1201 07:42:01.697825 4632 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-q2twj" Dec 01 07:42:01 crc kubenswrapper[4632]: I1201 07:42:01.841003 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-q2twj" Dec 01 07:42:03 crc kubenswrapper[4632]: I1201 07:42:03.771674 4632 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gh6lg"] Dec 01 07:42:03 crc kubenswrapper[4632]: I1201 07:42:03.774376 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gh6lg" Dec 01 07:42:03 crc kubenswrapper[4632]: I1201 07:42:03.783329 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gh6lg"] Dec 01 07:42:03 crc kubenswrapper[4632]: I1201 07:42:03.822523 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9bfede23-df69-4654-9e59-a70e67e118bc-catalog-content\") pod \"community-operators-gh6lg\" (UID: \"9bfede23-df69-4654-9e59-a70e67e118bc\") " pod="openshift-marketplace/community-operators-gh6lg" Dec 01 07:42:03 crc kubenswrapper[4632]: I1201 07:42:03.822626 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9bfede23-df69-4654-9e59-a70e67e118bc-utilities\") pod \"community-operators-gh6lg\" (UID: \"9bfede23-df69-4654-9e59-a70e67e118bc\") " pod="openshift-marketplace/community-operators-gh6lg" Dec 01 07:42:03 crc kubenswrapper[4632]: I1201 07:42:03.822789 4632 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cs8gv\" (UniqueName: \"kubernetes.io/projected/9bfede23-df69-4654-9e59-a70e67e118bc-kube-api-access-cs8gv\") pod \"community-operators-gh6lg\" (UID: \"9bfede23-df69-4654-9e59-a70e67e118bc\") " pod="openshift-marketplace/community-operators-gh6lg" Dec 01 07:42:03 crc kubenswrapper[4632]: I1201 07:42:03.924790 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9bfede23-df69-4654-9e59-a70e67e118bc-utilities\") pod \"community-operators-gh6lg\" (UID: \"9bfede23-df69-4654-9e59-a70e67e118bc\") " pod="openshift-marketplace/community-operators-gh6lg" Dec 01 07:42:03 crc kubenswrapper[4632]: I1201 07:42:03.924875 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cs8gv\" (UniqueName: \"kubernetes.io/projected/9bfede23-df69-4654-9e59-a70e67e118bc-kube-api-access-cs8gv\") pod \"community-operators-gh6lg\" (UID: \"9bfede23-df69-4654-9e59-a70e67e118bc\") " pod="openshift-marketplace/community-operators-gh6lg" Dec 01 07:42:03 crc kubenswrapper[4632]: I1201 07:42:03.924996 4632 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9bfede23-df69-4654-9e59-a70e67e118bc-catalog-content\") pod \"community-operators-gh6lg\" (UID: \"9bfede23-df69-4654-9e59-a70e67e118bc\") " pod="openshift-marketplace/community-operators-gh6lg" Dec 01 07:42:03 crc kubenswrapper[4632]: I1201 07:42:03.925664 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9bfede23-df69-4654-9e59-a70e67e118bc-catalog-content\") pod \"community-operators-gh6lg\" (UID: 
\"9bfede23-df69-4654-9e59-a70e67e118bc\") " pod="openshift-marketplace/community-operators-gh6lg" Dec 01 07:42:03 crc kubenswrapper[4632]: I1201 07:42:03.925680 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9bfede23-df69-4654-9e59-a70e67e118bc-utilities\") pod \"community-operators-gh6lg\" (UID: \"9bfede23-df69-4654-9e59-a70e67e118bc\") " pod="openshift-marketplace/community-operators-gh6lg" Dec 01 07:42:03 crc kubenswrapper[4632]: I1201 07:42:03.944853 4632 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cs8gv\" (UniqueName: \"kubernetes.io/projected/9bfede23-df69-4654-9e59-a70e67e118bc-kube-api-access-cs8gv\") pod \"community-operators-gh6lg\" (UID: \"9bfede23-df69-4654-9e59-a70e67e118bc\") " pod="openshift-marketplace/community-operators-gh6lg" Dec 01 07:42:04 crc kubenswrapper[4632]: I1201 07:42:04.092334 4632 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gh6lg" Dec 01 07:42:04 crc kubenswrapper[4632]: I1201 07:42:04.544821 4632 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gh6lg"] Dec 01 07:42:04 crc kubenswrapper[4632]: I1201 07:42:04.830392 4632 generic.go:334] "Generic (PLEG): container finished" podID="9bfede23-df69-4654-9e59-a70e67e118bc" containerID="33048028badeb29bdaaae1ab4adb2a9e1dc0e34ef20bd4f62eb27a16bc7f272d" exitCode=0 Dec 01 07:42:04 crc kubenswrapper[4632]: I1201 07:42:04.830468 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gh6lg" event={"ID":"9bfede23-df69-4654-9e59-a70e67e118bc","Type":"ContainerDied","Data":"33048028badeb29bdaaae1ab4adb2a9e1dc0e34ef20bd4f62eb27a16bc7f272d"} Dec 01 07:42:04 crc kubenswrapper[4632]: I1201 07:42:04.830733 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gh6lg" event={"ID":"9bfede23-df69-4654-9e59-a70e67e118bc","Type":"ContainerStarted","Data":"dd6217e6516d8924d36ff7f91f1486ebb0c7768670740fd53a7349f3744a9bc5"} Dec 01 07:42:04 crc kubenswrapper[4632]: I1201 07:42:04.832447 4632 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.161928 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-q2twj"] Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.162473 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-q2twj" podUID="37864ce8-b286-4d3b-b21e-e2980fe0316f" containerName="registry-server" containerID="cri-o://bf254da90c303e3628a56f7bbb85de3810068b65b62112396268dab91a15f3f5" gracePeriod=2 Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.572957 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q2twj" Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.600471 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d2z5n\" (UniqueName: \"kubernetes.io/projected/37864ce8-b286-4d3b-b21e-e2980fe0316f-kube-api-access-d2z5n\") pod \"37864ce8-b286-4d3b-b21e-e2980fe0316f\" (UID: \"37864ce8-b286-4d3b-b21e-e2980fe0316f\") " Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.600534 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37864ce8-b286-4d3b-b21e-e2980fe0316f-utilities\") pod \"37864ce8-b286-4d3b-b21e-e2980fe0316f\" (UID: \"37864ce8-b286-4d3b-b21e-e2980fe0316f\") " Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.600661 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37864ce8-b286-4d3b-b21e-e2980fe0316f-catalog-content\") pod \"37864ce8-b286-4d3b-b21e-e2980fe0316f\" (UID: \"37864ce8-b286-4d3b-b21e-e2980fe0316f\") " Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.603334 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37864ce8-b286-4d3b-b21e-e2980fe0316f-utilities" (OuterVolumeSpecName: "utilities") pod "37864ce8-b286-4d3b-b21e-e2980fe0316f" (UID: "37864ce8-b286-4d3b-b21e-e2980fe0316f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.610454 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37864ce8-b286-4d3b-b21e-e2980fe0316f-kube-api-access-d2z5n" (OuterVolumeSpecName: "kube-api-access-d2z5n") pod "37864ce8-b286-4d3b-b21e-e2980fe0316f" (UID: "37864ce8-b286-4d3b-b21e-e2980fe0316f"). InnerVolumeSpecName "kube-api-access-d2z5n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.625489 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37864ce8-b286-4d3b-b21e-e2980fe0316f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "37864ce8-b286-4d3b-b21e-e2980fe0316f" (UID: "37864ce8-b286-4d3b-b21e-e2980fe0316f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.704918 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37864ce8-b286-4d3b-b21e-e2980fe0316f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.704952 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d2z5n\" (UniqueName: \"kubernetes.io/projected/37864ce8-b286-4d3b-b21e-e2980fe0316f-kube-api-access-d2z5n\") on node \"crc\" DevicePath \"\"" Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.704965 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37864ce8-b286-4d3b-b21e-e2980fe0316f-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.872648 4632 generic.go:334] "Generic (PLEG): container finished" podID="9bfede23-df69-4654-9e59-a70e67e118bc" containerID="fd9c4d97cd0860bc3d573a2211979fdd5e62f5534b1ae77636c456de18a88058" exitCode=0 Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.872725 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gh6lg" event={"ID":"9bfede23-df69-4654-9e59-a70e67e118bc","Type":"ContainerDied","Data":"fd9c4d97cd0860bc3d573a2211979fdd5e62f5534b1ae77636c456de18a88058"} Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.875620 4632 generic.go:334] "Generic (PLEG): container finished" podID="37864ce8-b286-4d3b-b21e-e2980fe0316f" containerID="bf254da90c303e3628a56f7bbb85de3810068b65b62112396268dab91a15f3f5" exitCode=0 Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.875665 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q2twj" event={"ID":"37864ce8-b286-4d3b-b21e-e2980fe0316f","Type":"ContainerDied","Data":"bf254da90c303e3628a56f7bbb85de3810068b65b62112396268dab91a15f3f5"} Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.875693 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-q2twj" event={"ID":"37864ce8-b286-4d3b-b21e-e2980fe0316f","Type":"ContainerDied","Data":"805770f0d1dd71b100fb88c78fafa9f36bf290f893556fbe3d0c7c964a2f94cc"} Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.875701 4632 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-q2twj" Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.875714 4632 scope.go:117] "RemoveContainer" containerID="bf254da90c303e3628a56f7bbb85de3810068b65b62112396268dab91a15f3f5" Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.900444 4632 scope.go:117] "RemoveContainer" containerID="3c6d989e37264c9fc1c23039bd1e5beb496c941784489d34d60dfc8ce769be42" Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.914964 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-q2twj"] Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.919396 4632 scope.go:117] "RemoveContainer" containerID="0cda6e85f36adbf0dc0d8919cb319370f27bd783e7d1efd0a5a2c62ff2cd024a" Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.920183 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-q2twj"] Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.949861 4632 scope.go:117] "RemoveContainer" containerID="bf254da90c303e3628a56f7bbb85de3810068b65b62112396268dab91a15f3f5" Dec 01 07:42:06 crc kubenswrapper[4632]: E1201 07:42:06.950276 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf254da90c303e3628a56f7bbb85de3810068b65b62112396268dab91a15f3f5\": container with ID starting with bf254da90c303e3628a56f7bbb85de3810068b65b62112396268dab91a15f3f5 not found: ID does not exist" containerID="bf254da90c303e3628a56f7bbb85de3810068b65b62112396268dab91a15f3f5" Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.950333 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf254da90c303e3628a56f7bbb85de3810068b65b62112396268dab91a15f3f5"} err="failed to get container status \"bf254da90c303e3628a56f7bbb85de3810068b65b62112396268dab91a15f3f5\": rpc error: code = NotFound desc = could not find container \"bf254da90c303e3628a56f7bbb85de3810068b65b62112396268dab91a15f3f5\": container with ID starting with bf254da90c303e3628a56f7bbb85de3810068b65b62112396268dab91a15f3f5 not found: ID does not exist" Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.950401 4632 scope.go:117] "RemoveContainer" containerID="3c6d989e37264c9fc1c23039bd1e5beb496c941784489d34d60dfc8ce769be42" Dec 01 07:42:06 crc kubenswrapper[4632]: E1201 07:42:06.950859 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c6d989e37264c9fc1c23039bd1e5beb496c941784489d34d60dfc8ce769be42\": container with ID starting with 3c6d989e37264c9fc1c23039bd1e5beb496c941784489d34d60dfc8ce769be42 not found: ID does not exist" containerID="3c6d989e37264c9fc1c23039bd1e5beb496c941784489d34d60dfc8ce769be42" Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.951626 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c6d989e37264c9fc1c23039bd1e5beb496c941784489d34d60dfc8ce769be42"} err="failed to get container status \"3c6d989e37264c9fc1c23039bd1e5beb496c941784489d34d60dfc8ce769be42\": rpc error: code = NotFound desc = could not find container \"3c6d989e37264c9fc1c23039bd1e5beb496c941784489d34d60dfc8ce769be42\": container with ID starting with 3c6d989e37264c9fc1c23039bd1e5beb496c941784489d34d60dfc8ce769be42 not found: ID does not exist" Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.951669 4632 scope.go:117] "RemoveContainer" 
containerID="0cda6e85f36adbf0dc0d8919cb319370f27bd783e7d1efd0a5a2c62ff2cd024a" Dec 01 07:42:06 crc kubenswrapper[4632]: E1201 07:42:06.951959 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0cda6e85f36adbf0dc0d8919cb319370f27bd783e7d1efd0a5a2c62ff2cd024a\": container with ID starting with 0cda6e85f36adbf0dc0d8919cb319370f27bd783e7d1efd0a5a2c62ff2cd024a not found: ID does not exist" containerID="0cda6e85f36adbf0dc0d8919cb319370f27bd783e7d1efd0a5a2c62ff2cd024a" Dec 01 07:42:06 crc kubenswrapper[4632]: I1201 07:42:06.951988 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cda6e85f36adbf0dc0d8919cb319370f27bd783e7d1efd0a5a2c62ff2cd024a"} err="failed to get container status \"0cda6e85f36adbf0dc0d8919cb319370f27bd783e7d1efd0a5a2c62ff2cd024a\": rpc error: code = NotFound desc = could not find container \"0cda6e85f36adbf0dc0d8919cb319370f27bd783e7d1efd0a5a2c62ff2cd024a\": container with ID starting with 0cda6e85f36adbf0dc0d8919cb319370f27bd783e7d1efd0a5a2c62ff2cd024a not found: ID does not exist" Dec 01 07:42:06 crc kubenswrapper[4632]: E1201 07:42:06.985303 4632 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37864ce8_b286_4d3b_b21e_e2980fe0316f.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37864ce8_b286_4d3b_b21e_e2980fe0316f.slice/crio-805770f0d1dd71b100fb88c78fafa9f36bf290f893556fbe3d0c7c964a2f94cc\": RecentStats: unable to find data in memory cache]" Dec 01 07:42:07 crc kubenswrapper[4632]: I1201 07:42:07.885681 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gh6lg" event={"ID":"9bfede23-df69-4654-9e59-a70e67e118bc","Type":"ContainerStarted","Data":"cc6214f09f82ccd6901b7d5c1df28306b681ba4a3969545bade6e23b5b32ab7f"} Dec 01 07:42:07 crc kubenswrapper[4632]: I1201 07:42:07.908128 4632 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gh6lg" podStartSLOduration=2.322780291 podStartE2EDuration="4.908111574s" podCreationTimestamp="2025-12-01 07:42:03 +0000 UTC" firstStartedPulling="2025-12-01 07:42:04.832112655 +0000 UTC m=+3534.397125628" lastFinishedPulling="2025-12-01 07:42:07.417443938 +0000 UTC m=+3536.982456911" observedRunningTime="2025-12-01 07:42:07.905130036 +0000 UTC m=+3537.470143009" watchObservedRunningTime="2025-12-01 07:42:07.908111574 +0000 UTC m=+3537.473124547" Dec 01 07:42:08 crc kubenswrapper[4632]: I1201 07:42:08.758898 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37864ce8-b286-4d3b-b21e-e2980fe0316f" path="/var/lib/kubelet/pods/37864ce8-b286-4d3b-b21e-e2980fe0316f/volumes" Dec 01 07:42:14 crc kubenswrapper[4632]: I1201 07:42:14.093001 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gh6lg" Dec 01 07:42:14 crc kubenswrapper[4632]: I1201 07:42:14.093716 4632 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gh6lg" Dec 01 07:42:14 crc kubenswrapper[4632]: I1201 07:42:14.131442 4632 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gh6lg" Dec 01 07:42:14 crc kubenswrapper[4632]: I1201 07:42:14.990909 4632 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gh6lg" Dec 01 07:42:16 crc kubenswrapper[4632]: I1201 07:42:16.163422 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gh6lg"] Dec 01 07:42:16 crc kubenswrapper[4632]: I1201 07:42:16.957563 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-gh6lg" podUID="9bfede23-df69-4654-9e59-a70e67e118bc" containerName="registry-server" containerID="cri-o://cc6214f09f82ccd6901b7d5c1df28306b681ba4a3969545bade6e23b5b32ab7f" gracePeriod=2 Dec 01 07:42:17 crc kubenswrapper[4632]: I1201 07:42:17.358734 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gh6lg" Dec 01 07:42:17 crc kubenswrapper[4632]: I1201 07:42:17.401792 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9bfede23-df69-4654-9e59-a70e67e118bc-utilities\") pod \"9bfede23-df69-4654-9e59-a70e67e118bc\" (UID: \"9bfede23-df69-4654-9e59-a70e67e118bc\") " Dec 01 07:42:17 crc kubenswrapper[4632]: I1201 07:42:17.401865 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cs8gv\" (UniqueName: \"kubernetes.io/projected/9bfede23-df69-4654-9e59-a70e67e118bc-kube-api-access-cs8gv\") pod \"9bfede23-df69-4654-9e59-a70e67e118bc\" (UID: \"9bfede23-df69-4654-9e59-a70e67e118bc\") " Dec 01 07:42:17 crc kubenswrapper[4632]: I1201 07:42:17.401922 4632 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9bfede23-df69-4654-9e59-a70e67e118bc-catalog-content\") pod \"9bfede23-df69-4654-9e59-a70e67e118bc\" (UID: \"9bfede23-df69-4654-9e59-a70e67e118bc\") " Dec 01 07:42:17 crc kubenswrapper[4632]: I1201 07:42:17.403129 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9bfede23-df69-4654-9e59-a70e67e118bc-utilities" (OuterVolumeSpecName: "utilities") pod "9bfede23-df69-4654-9e59-a70e67e118bc" (UID: "9bfede23-df69-4654-9e59-a70e67e118bc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:42:17 crc kubenswrapper[4632]: I1201 07:42:17.412660 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9bfede23-df69-4654-9e59-a70e67e118bc-kube-api-access-cs8gv" (OuterVolumeSpecName: "kube-api-access-cs8gv") pod "9bfede23-df69-4654-9e59-a70e67e118bc" (UID: "9bfede23-df69-4654-9e59-a70e67e118bc"). InnerVolumeSpecName "kube-api-access-cs8gv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 01 07:42:17 crc kubenswrapper[4632]: I1201 07:42:17.442639 4632 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9bfede23-df69-4654-9e59-a70e67e118bc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9bfede23-df69-4654-9e59-a70e67e118bc" (UID: "9bfede23-df69-4654-9e59-a70e67e118bc"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 01 07:42:17 crc kubenswrapper[4632]: I1201 07:42:17.505439 4632 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9bfede23-df69-4654-9e59-a70e67e118bc-utilities\") on node \"crc\" DevicePath \"\"" Dec 01 07:42:17 crc kubenswrapper[4632]: I1201 07:42:17.505468 4632 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cs8gv\" (UniqueName: \"kubernetes.io/projected/9bfede23-df69-4654-9e59-a70e67e118bc-kube-api-access-cs8gv\") on node \"crc\" DevicePath \"\"" Dec 01 07:42:17 crc kubenswrapper[4632]: I1201 07:42:17.505481 4632 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9bfede23-df69-4654-9e59-a70e67e118bc-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 01 07:42:17 crc kubenswrapper[4632]: I1201 07:42:17.967691 4632 generic.go:334] "Generic (PLEG): container finished" podID="9bfede23-df69-4654-9e59-a70e67e118bc" containerID="cc6214f09f82ccd6901b7d5c1df28306b681ba4a3969545bade6e23b5b32ab7f" exitCode=0 Dec 01 07:42:17 crc kubenswrapper[4632]: I1201 07:42:17.967762 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gh6lg" event={"ID":"9bfede23-df69-4654-9e59-a70e67e118bc","Type":"ContainerDied","Data":"cc6214f09f82ccd6901b7d5c1df28306b681ba4a3969545bade6e23b5b32ab7f"} Dec 01 07:42:17 crc kubenswrapper[4632]: I1201 07:42:17.967799 4632 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gh6lg" Dec 01 07:42:17 crc kubenswrapper[4632]: I1201 07:42:17.967834 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gh6lg" event={"ID":"9bfede23-df69-4654-9e59-a70e67e118bc","Type":"ContainerDied","Data":"dd6217e6516d8924d36ff7f91f1486ebb0c7768670740fd53a7349f3744a9bc5"} Dec 01 07:42:17 crc kubenswrapper[4632]: I1201 07:42:17.967850 4632 scope.go:117] "RemoveContainer" containerID="cc6214f09f82ccd6901b7d5c1df28306b681ba4a3969545bade6e23b5b32ab7f" Dec 01 07:42:17 crc kubenswrapper[4632]: I1201 07:42:17.999500 4632 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gh6lg"] Dec 01 07:42:18 crc kubenswrapper[4632]: I1201 07:42:18.004552 4632 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-gh6lg"] Dec 01 07:42:18 crc kubenswrapper[4632]: I1201 07:42:18.011837 4632 scope.go:117] "RemoveContainer" containerID="fd9c4d97cd0860bc3d573a2211979fdd5e62f5534b1ae77636c456de18a88058" Dec 01 07:42:18 crc kubenswrapper[4632]: I1201 07:42:18.037549 4632 scope.go:117] "RemoveContainer" containerID="33048028badeb29bdaaae1ab4adb2a9e1dc0e34ef20bd4f62eb27a16bc7f272d" Dec 01 07:42:18 crc kubenswrapper[4632]: I1201 07:42:18.068167 4632 scope.go:117] "RemoveContainer" containerID="cc6214f09f82ccd6901b7d5c1df28306b681ba4a3969545bade6e23b5b32ab7f" Dec 01 07:42:18 crc kubenswrapper[4632]: E1201 07:42:18.068788 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc6214f09f82ccd6901b7d5c1df28306b681ba4a3969545bade6e23b5b32ab7f\": container with ID starting with cc6214f09f82ccd6901b7d5c1df28306b681ba4a3969545bade6e23b5b32ab7f not found: ID does not exist" containerID="cc6214f09f82ccd6901b7d5c1df28306b681ba4a3969545bade6e23b5b32ab7f" Dec 01 07:42:18 crc kubenswrapper[4632]: I1201 07:42:18.068911 
4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc6214f09f82ccd6901b7d5c1df28306b681ba4a3969545bade6e23b5b32ab7f"} err="failed to get container status \"cc6214f09f82ccd6901b7d5c1df28306b681ba4a3969545bade6e23b5b32ab7f\": rpc error: code = NotFound desc = could not find container \"cc6214f09f82ccd6901b7d5c1df28306b681ba4a3969545bade6e23b5b32ab7f\": container with ID starting with cc6214f09f82ccd6901b7d5c1df28306b681ba4a3969545bade6e23b5b32ab7f not found: ID does not exist" Dec 01 07:42:18 crc kubenswrapper[4632]: I1201 07:42:18.069023 4632 scope.go:117] "RemoveContainer" containerID="fd9c4d97cd0860bc3d573a2211979fdd5e62f5534b1ae77636c456de18a88058" Dec 01 07:42:18 crc kubenswrapper[4632]: E1201 07:42:18.069435 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd9c4d97cd0860bc3d573a2211979fdd5e62f5534b1ae77636c456de18a88058\": container with ID starting with fd9c4d97cd0860bc3d573a2211979fdd5e62f5534b1ae77636c456de18a88058 not found: ID does not exist" containerID="fd9c4d97cd0860bc3d573a2211979fdd5e62f5534b1ae77636c456de18a88058" Dec 01 07:42:18 crc kubenswrapper[4632]: I1201 07:42:18.069542 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd9c4d97cd0860bc3d573a2211979fdd5e62f5534b1ae77636c456de18a88058"} err="failed to get container status \"fd9c4d97cd0860bc3d573a2211979fdd5e62f5534b1ae77636c456de18a88058\": rpc error: code = NotFound desc = could not find container \"fd9c4d97cd0860bc3d573a2211979fdd5e62f5534b1ae77636c456de18a88058\": container with ID starting with fd9c4d97cd0860bc3d573a2211979fdd5e62f5534b1ae77636c456de18a88058 not found: ID does not exist" Dec 01 07:42:18 crc kubenswrapper[4632]: I1201 07:42:18.069607 4632 scope.go:117] "RemoveContainer" containerID="33048028badeb29bdaaae1ab4adb2a9e1dc0e34ef20bd4f62eb27a16bc7f272d" Dec 01 07:42:18 crc kubenswrapper[4632]: E1201 07:42:18.069982 4632 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33048028badeb29bdaaae1ab4adb2a9e1dc0e34ef20bd4f62eb27a16bc7f272d\": container with ID starting with 33048028badeb29bdaaae1ab4adb2a9e1dc0e34ef20bd4f62eb27a16bc7f272d not found: ID does not exist" containerID="33048028badeb29bdaaae1ab4adb2a9e1dc0e34ef20bd4f62eb27a16bc7f272d" Dec 01 07:42:18 crc kubenswrapper[4632]: I1201 07:42:18.070022 4632 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33048028badeb29bdaaae1ab4adb2a9e1dc0e34ef20bd4f62eb27a16bc7f272d"} err="failed to get container status \"33048028badeb29bdaaae1ab4adb2a9e1dc0e34ef20bd4f62eb27a16bc7f272d\": rpc error: code = NotFound desc = could not find container \"33048028badeb29bdaaae1ab4adb2a9e1dc0e34ef20bd4f62eb27a16bc7f272d\": container with ID starting with 33048028badeb29bdaaae1ab4adb2a9e1dc0e34ef20bd4f62eb27a16bc7f272d not found: ID does not exist" Dec 01 07:42:18 crc kubenswrapper[4632]: I1201 07:42:18.760218 4632 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9bfede23-df69-4654-9e59-a70e67e118bc" path="/var/lib/kubelet/pods/9bfede23-df69-4654-9e59-a70e67e118bc/volumes" Dec 01 07:42:19 crc kubenswrapper[4632]: I1201 07:42:19.497900 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:42:19 crc kubenswrapper[4632]: I1201 07:42:19.498266 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:42:21 crc kubenswrapper[4632]: I1201 07:42:21.735117 4632 scope.go:117] "RemoveContainer" containerID="ad3d6a0a0e88cf95a237e8f79577e25ffd77344b39db66738049cdcad16ed61b" Dec 01 07:42:49 crc kubenswrapper[4632]: I1201 07:42:49.498443 4632 patch_prober.go:28] interesting pod/machine-config-daemon-gm9xs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 01 07:42:49 crc kubenswrapper[4632]: I1201 07:42:49.499199 4632 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 01 07:42:49 crc kubenswrapper[4632]: I1201 07:42:49.499276 4632 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" Dec 01 07:42:49 crc kubenswrapper[4632]: I1201 07:42:49.499957 4632 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2dcdc54947fded2608d810998eb7cdbcae0ece12a5aefa5f444e9f2d4f62b44c"} pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 01 07:42:49 crc kubenswrapper[4632]: I1201 07:42:49.500026 4632 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerName="machine-config-daemon" containerID="cri-o://2dcdc54947fded2608d810998eb7cdbcae0ece12a5aefa5f444e9f2d4f62b44c" gracePeriod=600 Dec 01 07:42:49 crc kubenswrapper[4632]: E1201 07:42:49.624157 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539" Dec 01 07:42:50 crc kubenswrapper[4632]: I1201 07:42:50.273191 4632 generic.go:334] "Generic (PLEG): container finished" podID="168bb8aa-8b43-44df-836f-90d6d52f1539" containerID="2dcdc54947fded2608d810998eb7cdbcae0ece12a5aefa5f444e9f2d4f62b44c" exitCode=0 Dec 01 07:42:50 crc kubenswrapper[4632]: I1201 07:42:50.273236 4632 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" event={"ID":"168bb8aa-8b43-44df-836f-90d6d52f1539","Type":"ContainerDied","Data":"2dcdc54947fded2608d810998eb7cdbcae0ece12a5aefa5f444e9f2d4f62b44c"} Dec 01 07:42:50 crc kubenswrapper[4632]: I1201 
Dec 01 07:42:50 crc kubenswrapper[4632]: I1201 07:42:50.273282 4632 scope.go:117] "RemoveContainer" containerID="6bd77611f8ce655c502bf7a7d55064856d8c28e6ff4f924c8c321dab8d172bd2"
Dec 01 07:42:50 crc kubenswrapper[4632]: I1201 07:42:50.273698 4632 scope.go:117] "RemoveContainer" containerID="2dcdc54947fded2608d810998eb7cdbcae0ece12a5aefa5f444e9f2d4f62b44c"
Dec 01 07:42:50 crc kubenswrapper[4632]: E1201 07:42:50.274002 4632 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-gm9xs_openshift-machine-config-operator(168bb8aa-8b43-44df-836f-90d6d52f1539)\"" pod="openshift-machine-config-operator/machine-config-daemon-gm9xs" podUID="168bb8aa-8b43-44df-836f-90d6d52f1539"